abef1cdf 1diff -urN gcc-3.3.1/gcc/Makefile.in gcc-3.3.1-pp/gcc/Makefile.in
a89a5183 2--- gcc-3.3.1/gcc/Makefile.in 2003-08-03 15:48:36.000000000 +0000
abef1cdf 3+++ gcc-3.3.1-pp/gcc/Makefile.in 2003-09-12 13:40:28.000000000 +0000
4@@ -387,7 +387,7 @@
5 # Options to use when compiling libgcc2.a.
6 #
7 LIBGCC2_DEBUG_CFLAGS = -g
8-LIBGCC2_CFLAGS = -O2 $(LIBGCC2_INCLUDES) $(GCC_CFLAGS) $(TARGET_LIBGCC2_CFLAGS) $(LIBGCC2_DEBUG_CFLAGS) $(GTHREAD_FLAGS) -DIN_LIBGCC2 -D__GCC_FLOAT_NOT_NEEDED @inhibit_libc@
9+LIBGCC2_CFLAGS = -O2 $(LIBGCC2_INCLUDES) $(GCC_CFLAGS) $(TARGET_LIBGCC2_CFLAGS) $(LIBGCC2_DEBUG_CFLAGS) $(GTHREAD_FLAGS) -DIN_LIBGCC2 -D__GCC_FLOAT_NOT_NEEDED @inhibit_libc@ -fno-stack-protector
10
11 # Additional options to use when compiling libgcc2.a.
12 # Some targets override this to -isystem include
13@@ -764,7 +764,7 @@
14 sibcall.o simplify-rtx.o ssa.o ssa-ccp.o ssa-dce.o stmt.o \
15 stor-layout.o stringpool.o timevar.o toplev.o tracer.o tree.o tree-dump.o \
16 tree-inline.o unroll.o varasm.o varray.o version.o vmsdbgout.o xcoffout.o \
17- et-forest.o $(GGC) $(out_object_file) $(EXTRA_OBJS)
18+ et-forest.o protector.o $(GGC) $(out_object_file) $(EXTRA_OBJS)
19
20 BACKEND = main.o libbackend.a
21
22@@ -798,7 +798,7 @@
23
24 LIB2FUNCS_2 = _floatdixf _fixunsxfsi _fixtfdi _fixunstfdi _floatditf \
25 _clear_cache _trampoline __main _exit _absvsi2 _absvdi2 _addvsi3 \
26- _addvdi3 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors
27+ _addvdi3 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors _stack_smash_handler
28
29 # Defined in libgcc2.c, included only in the static library.
30 LIB2FUNCS_ST = _eprintf _bb __gcc_bcmp
31@@ -1410,7 +1410,7 @@
32 ssa.h $(PARAMS_H) $(TM_P_H) reload.h dwarf2asm.h $(TARGET_H) \
33 langhooks.h insn-flags.h options.h cfglayout.h real.h
34 $(CC) $(ALL_CFLAGS) $(ALL_CPPFLAGS) $(INCLUDES) \
35- -DTARGET_NAME=\"$(target_alias)\" \
36+ -DSTACK_PROTECTOR -DTARGET_NAME=\"$(target_alias)\" \
37 -c $(srcdir)/toplev.c $(OUTPUT_OPTION)
38 main.o : main.c $(CONFIG_H) $(SYSTEM_H) toplev.h
39
40@@ -1665,6 +1665,7 @@
41 output.h except.h $(TM_P_H) real.h
42 params.o : params.c $(CONFIG_H) $(SYSTEM_H) $(PARAMS_H) toplev.h
43 hooks.o: hooks.c $(CONFIG_H) $(SYSTEM_H) $(HOOKS_H)
44+protector.o: protector.c $(CONFIG_H)
45
46 $(out_object_file): $(out_file) $(CONFIG_H) $(TREE_H) $(GGC_H) \
47 $(RTL_H) $(REGS_H) hard-reg-set.h real.h insn-config.h conditions.h \
abef1cdf 48diff -urN gcc-3.3.1/gcc/calls.c gcc-3.3.1-pp/gcc/calls.c
a89a5183 49--- gcc-3.3.1/gcc/calls.c 2003-07-18 02:58:25.000000000 +0000
abef1cdf 50+++ gcc-3.3.1-pp/gcc/calls.c 2003-09-12 13:40:28.000000000 +0000
51@@ -2327,7 +2327,7 @@
52 /* For variable-sized objects, we must be called with a target
53 specified. If we were to allocate space on the stack here,
54 we would have no way of knowing when to free it. */
55- rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
56+ rtx d = assign_temp (TREE_TYPE (exp), 5, 1, 1);
57
58 mark_temp_addr_taken (d);
59 structure_value_addr = XEXP (d, 0);
60diff -urN gcc-3.3.1/gcc/combine.c gcc-3.3.1-pp/gcc/combine.c
61--- gcc-3.3.1/gcc/combine.c 2003-03-24 11:37:32.000000000 +0000
62+++ gcc-3.3.1-pp/gcc/combine.c 2003-09-12 13:40:28.000000000 +0000
63@@ -3859,7 +3859,17 @@
64 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
65 rtx inner_op1 = XEXP (x, 1);
66 rtx inner;
67-
68+
69+#ifndef FRAME_GROWS_DOWNWARD
70+ if (flag_propolice_protection
71+ && code == PLUS
72+ && other == frame_pointer_rtx
73+ && GET_CODE (inner_op0) == CONST_INT
74+ && GET_CODE (inner_op1) == CONST_INT
75+ && INTVAL (inner_op0) > 0
76+ && INTVAL (inner_op0) + INTVAL (inner_op1) <= 0)
77+ return x;
a89a5183 78+#endif
79 /* Make sure we pass the constant operand if any as the second
80 one if this is a commutative operation. */
81 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
82@@ -4272,6 +4282,11 @@
83 they are now checked elsewhere. */
84 if (GET_CODE (XEXP (x, 0)) == PLUS
85 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
86+#ifndef FRAME_GROWS_DOWNWARD
87+ if (! (flag_propolice_protection
88+ && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx
89+ && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
a89a5183 90+#endif
91 return gen_binary (PLUS, mode,
92 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
93 XEXP (x, 1)),
94@@ -4400,7 +4415,10 @@
95
96 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
97 integers. */
98- if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
99+ if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)
100+ && (! (flag_propolice_protection
101+ && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
102+ && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)))
103 return gen_binary (MINUS, mode,
104 gen_binary (MINUS, mode, XEXP (x, 0),
105 XEXP (XEXP (x, 1), 0)),
106diff -urN gcc-3.3.1/gcc/config/t-linux gcc-3.3.1-pp/gcc/config/t-linux
107--- gcc-3.3.1/gcc/config/t-linux 2003-06-04 16:56:11.000000000 +0000
108+++ gcc-3.3.1-pp/gcc/config/t-linux 2003-09-12 13:40:28.000000000 +0000
109@@ -4,7 +4,7 @@
110 # Compile crtbeginS.o and crtendS.o with pic.
111 CRTSTUFF_T_CFLAGS_S = $(CRTSTUFF_T_CFLAGS) -fPIC
112 # Compile libgcc2.a with pic.
113-TARGET_LIBGCC2_CFLAGS = -fPIC
114+TARGET_LIBGCC2_CFLAGS = -fPIC -DHAVE_SYSLOG
115
116 # Override t-slibgcc-elf-ver to export some libgcc symbols with
117 # the symbol versions that glibc used.
118diff -urN gcc-3.3.1/gcc/cse.c gcc-3.3.1-pp/gcc/cse.c
119--- gcc-3.3.1/gcc/cse.c 2003-04-29 19:16:40.000000000 +0000
120+++ gcc-3.3.1-pp/gcc/cse.c 2003-09-12 13:40:28.000000000 +0000
121@@ -4288,7 +4288,14 @@
122
123 if (new_const == 0)
124 break;
125-
126+#ifndef FRAME_GROWS_DOWNWARD
127+ if (flag_propolice_protection
128+ && GET_CODE (y) == PLUS
129+ && XEXP (y, 0) == frame_pointer_rtx
130+ && INTVAL (inner_const) > 0
131+ && INTVAL (new_const) <= 0)
132+ break;
a89a5183 133+#endif
134 /* If we are associating shift operations, don't let this
135 produce a shift of the size of the object or larger.
136 This could occur when we follow a sign-extend by a right
137@@ -4823,6 +4830,13 @@
138 if (SET_DEST (x) == pc_rtx
139 && GET_CODE (SET_SRC (x)) == LABEL_REF)
140 ;
141+ /* cut the reg propagation of stack-protected argument */
142+ else if (x->volatil) {
143+ rtx x1 = SET_DEST (x);
144+ if (GET_CODE (x1) == SUBREG && GET_CODE (SUBREG_REG (x1)) == REG)
145+ x1 = SUBREG_REG (x1);
146+ make_new_qty (REGNO (x1), GET_MODE (x1));
a89a5183 147+ }
148
149 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
150 The hard function value register is used only once, to copy to
151diff -urN gcc-3.3.1/gcc/explow.c gcc-3.3.1-pp/gcc/explow.c
152--- gcc-3.3.1/gcc/explow.c 2003-04-07 22:58:12.000000000 +0000
153+++ gcc-3.3.1-pp/gcc/explow.c 2003-09-12 13:40:28.000000000 +0000
154@@ -86,7 +86,8 @@
155 rtx tem;
156 int all_constant = 0;
157
158- if (c == 0)
159+ if (c == 0
160+ && !(flag_propolice_protection && x == virtual_stack_vars_rtx))
161 return x;
162
163 restart:
164@@ -187,7 +188,8 @@
165 break;
166 }
167
168- if (c != 0)
169+ if (c != 0
170+ || (flag_propolice_protection && x == virtual_stack_vars_rtx))
171 x = gen_rtx_PLUS (mode, x, GEN_INT (c));
172
173 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
174@@ -531,6 +533,21 @@
175 in certain cases. This is not necessary since the code
176 below can handle all possible cases, but machine-dependent
177 transformations can make better code. */
178+ if (flag_propolice_protection)
a89a5183 179+ {
180+#define FRAMEADDR_P(X) (GET_CODE (X) == PLUS \
181+ && XEXP (X, 0) == virtual_stack_vars_rtx \
182+ && GET_CODE (XEXP (X, 1)) == CONST_INT)
183+ rtx y;
184+ if (FRAMEADDR_P (x)) goto win;
185+ for (y=x; y!=0 && GET_CODE (y)==PLUS; y = XEXP (y, 0))
a89a5183 186+ {
187+ if (FRAMEADDR_P (XEXP (y, 0)))
188+ XEXP (y, 0) = force_reg (GET_MODE (XEXP (y, 0)), XEXP (y, 0));
189+ if (FRAMEADDR_P (XEXP (y, 1)))
190+ XEXP (y, 1) = force_reg (GET_MODE (XEXP (y, 1)), XEXP (y, 1));
a89a5183 191+ }
a89a5183 192+ }
193 LEGITIMIZE_ADDRESS (x, oldx, mode, win);
194
195 /* PLUS and MULT can appear in special ways
196diff -urN gcc-3.3.1/gcc/expr.c gcc-3.3.1-pp/gcc/expr.c
197--- gcc-3.3.1/gcc/expr.c 2003-07-24 19:11:20.000000000 +0000
198+++ gcc-3.3.1-pp/gcc/expr.c 2003-09-12 13:40:28.000000000 +0000
199@@ -45,6 +45,7 @@
200 #include "langhooks.h"
201 #include "intl.h"
202 #include "tm_p.h"
203+#include "protector.h"
204
205 /* Decide whether a function's arguments should be processed
206 from first to last or from last to first.
207@@ -1518,7 +1519,7 @@
208
209 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
210 {
211- data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
212+ data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len-GET_MODE_SIZE (mode)));
213 data.autinc_from = 1;
214 data.explicit_inc_from = -1;
215 }
216@@ -1532,7 +1533,7 @@
217 data.from_addr = copy_addr_to_reg (from_addr);
218 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
219 {
220- data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
221+ data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len-GET_MODE_SIZE (mode)));
222 data.autinc_to = 1;
223 data.explicit_inc_to = -1;
224 }
225@@ -1649,11 +1650,13 @@
226 from1 = adjust_address (data->from, mode, data->offset);
227
228 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
229- emit_insn (gen_add2_insn (data->to_addr,
230- GEN_INT (-(HOST_WIDE_INT)size)));
231+ if (data->explicit_inc_to < -1)
232+ emit_insn (gen_add2_insn (data->to_addr,
233+ GEN_INT (-(HOST_WIDE_INT)size)));
234 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
235- emit_insn (gen_add2_insn (data->from_addr,
236- GEN_INT (-(HOST_WIDE_INT)size)));
237+ if (data->explicit_inc_from < -1)
238+ emit_insn (gen_add2_insn (data->from_addr,
239+ GEN_INT (-(HOST_WIDE_INT)size)));
240
241 if (data->to)
242 emit_insn ((*genfun) (to1, from1));
243@@ -2826,7 +2829,7 @@
244
245 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
246 {
247- data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
248+ data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len-GET_MODE_SIZE (mode)));
249 data->autinc_to = 1;
250 data->explicit_inc_to = -1;
251 }
252@@ -2897,8 +2900,9 @@
253 to1 = adjust_address (data->to, mode, data->offset);
254
255 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
256- emit_insn (gen_add2_insn (data->to_addr,
257- GEN_INT (-(HOST_WIDE_INT) size)));
258+ if (data->explicit_inc_to < -1)
259+ emit_insn (gen_add2_insn (data->to_addr,
260+ GEN_INT (-(HOST_WIDE_INT) size)));
261
262 cst = (*data->constfun) (data->constfundata, data->offset, mode);
263 emit_insn ((*genfun) (to1, cst));
264@@ -5894,7 +5898,9 @@
265 && GET_CODE (XEXP (value, 0)) == PLUS
266 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
267 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
268- && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
269+ && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER
270+ && (!flag_propolice_protection
271+ || XEXP (XEXP (value, 0), 0) != virtual_stack_vars_rtx))
272 {
273 rtx temp = expand_simple_binop (GET_MODE (value), code,
274 XEXP (XEXP (value, 0), 0), op2,
275@@ -8070,7 +8076,8 @@
276 /* If adding to a sum including a constant,
277 associate it to put the constant outside. */
278 if (GET_CODE (op1) == PLUS
279- && CONSTANT_P (XEXP (op1, 1)))
280+ && CONSTANT_P (XEXP (op1, 1))
281+ && !(flag_propolice_protection && (contains_fp (op0) || contains_fp (op1))))
282 {
283 rtx constant_term = const0_rtx;
284
285diff -urN gcc-3.3.1/gcc/flags.h gcc-3.3.1-pp/gcc/flags.h
286--- gcc-3.3.1/gcc/flags.h 2003-06-20 21:18:41.000000000 +0000
287+++ gcc-3.3.1-pp/gcc/flags.h 2003-09-12 13:40:28.000000000 +0000
288@@ -690,4 +690,12 @@
289 #define HONOR_SIGN_DEPENDENT_ROUNDING(MODE) \
290 (MODE_HAS_SIGN_DEPENDENT_ROUNDING (MODE) && !flag_unsafe_math_optimizations)
291
292+/* Nonzero means use propolice as a stack protection method */
a89a5183 293+
abef1cdf 294+extern int flag_propolice_protection;
a89a5183 295+
abef1cdf 296+/* Warn when not issuing stack smashing protection for some reason */
a89a5183 297+
abef1cdf 298+extern int warn_stack_protector;
a89a5183 299+
300 #endif /* ! GCC_FLAGS_H */
301diff -urN gcc-3.3.1/gcc/function.c gcc-3.3.1-pp/gcc/function.c
302--- gcc-3.3.1/gcc/function.c 2003-04-10 22:26:04.000000000 +0000
303+++ gcc-3.3.1-pp/gcc/function.c 2003-09-12 13:40:28.000000000 +0000
304@@ -59,6 +59,7 @@
305 #include "tm_p.h"
306 #include "integrate.h"
307 #include "langhooks.h"
308+#include "protector.h"
309
310 #ifndef TRAMPOLINE_ALIGNMENT
311 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
312@@ -142,6 +143,10 @@
313 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
314 in this function. */
315 static GTY(()) varray_type sibcall_epilogue;
a89a5183 316+
317+/* Current boundary mark for character arrays. */
318+int temp_boundary_mark = 0;
a89a5183 319+
320 \f
321 /* In order to evaluate some expressions, such as function calls returning
322 structures in memory, we need to temporarily allocate stack locations.
323@@ -195,6 +200,8 @@
324 /* The size of the slot, including extra space for alignment. This
325 info is for combine_temp_slots. */
326 HOST_WIDE_INT full_size;
327+ /* Boundary mark of a character array and the others. This info is for propolice */
328+ int boundary_mark;
329 };
330 \f
331 /* This structure is used to record MEMs or pseudos used to replace VAR, any
332@@ -629,6 +636,7 @@
333 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
334 if we are to allocate something at an inner level to be treated as
335 a variable in the block (e.g., a SAVE_EXPR).
336+ KEEP is 5 if we allocate a place to return structure.
337
338 TYPE is the type that will be used for the stack slot. */
339
340@@ -642,6 +650,8 @@
341 unsigned int align;
342 struct temp_slot *p, *best_p = 0;
343 rtx slot;
344+ int char_array = (flag_propolice_protection
345+ && keep == 1 && search_string_def (type));
346
347 /* If SIZE is -1 it means that somebody tried to allocate a temporary
348 of a variable size. */
349@@ -667,7 +677,8 @@
350 && ! p->in_use
351 && objects_must_conflict_p (p->type, type)
352 && (best_p == 0 || best_p->size > p->size
353- || (best_p->size == p->size && best_p->align > p->align)))
354+ || (best_p->size == p->size && best_p->align > p->align))
355+ && (! char_array || p->boundary_mark != 0))
356 {
357 if (p->align == align && p->size == size)
358 {
359@@ -702,6 +713,7 @@
360 p->address = 0;
361 p->rtl_expr = 0;
362 p->type = best_p->type;
363+ p->boundary_mark = best_p->boundary_mark;
364 p->next = temp_slots;
365 temp_slots = p;
366
367@@ -762,6 +774,7 @@
368 p->full_size = frame_offset - frame_offset_old;
369 #endif
370 p->address = 0;
371+ p->boundary_mark = char_array?++temp_boundary_mark:0;
372 p->next = temp_slots;
373 temp_slots = p;
374 }
375@@ -932,14 +945,16 @@
376 int delete_q = 0;
377 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
378 {
379- if (p->base_offset + p->full_size == q->base_offset)
380+ if (p->base_offset + p->full_size == q->base_offset &&
381+ p->boundary_mark == q->boundary_mark)
382 {
383 /* Q comes after P; combine Q into P. */
384 p->size += q->size;
385 p->full_size += q->full_size;
386 delete_q = 1;
387 }
388- else if (q->base_offset + q->full_size == p->base_offset)
389+ else if (q->base_offset + q->full_size == p->base_offset &&
390+ p->boundary_mark == q->boundary_mark)
391 {
392 /* P comes after Q; combine P into Q. */
393 q->size += p->size;
394@@ -1497,7 +1512,9 @@
395 new = func->x_parm_reg_stack_loc[regno];
396
397 if (new == 0)
398- new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
399+ new = function ?
400+ assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func):
401+ assign_stack_local_for_pseudo_reg (decl_mode, GET_MODE_SIZE (decl_mode), 0);
402
403 PUT_CODE (reg, MEM);
404 PUT_MODE (reg, decl_mode);
405@@ -3961,7 +3978,8 @@
406 constant with that register. */
407 temp = gen_reg_rtx (Pmode);
408 XEXP (x, 0) = new;
409- if (validate_change (object, &XEXP (x, 1), temp, 0))
410+ if (validate_change (object, &XEXP (x, 1), temp, 0)
411+ && ! flag_propolice_protection)
412 emit_insn_before (gen_move_insn (temp, new_offset), object);
413 else
414 {
415diff -urN gcc-3.3.1/gcc/gcse.c gcc-3.3.1-pp/gcc/gcse.c
416--- gcc-3.3.1/gcc/gcse.c 2003-07-14 09:21:22.000000000 +0000
417+++ gcc-3.3.1-pp/gcc/gcse.c 2003-09-12 13:40:28.000000000 +0000
418@@ -4211,7 +4211,7 @@
419 /* Find an assignment that sets reg_used and is available
420 at the start of the block. */
421 set = find_avail_set (regno, insn);
422- if (! set)
423+ if (! set || set->expr->volatil)
424 continue;
425
426 pat = set->expr;
427diff -urN gcc-3.3.1/gcc/integrate.c gcc-3.3.1-pp/gcc/integrate.c
428--- gcc-3.3.1/gcc/integrate.c 2003-07-15 01:05:43.000000000 +0000
429+++ gcc-3.3.1-pp/gcc/integrate.c 2003-09-12 13:40:28.000000000 +0000
430@@ -401,6 +401,10 @@
431 /* These args would always appear unused, if not for this. */
432 TREE_USED (copy) = 1;
433
434+ /* The inlined variable is marked as INLINE not to sweep by propolice */
435+ if (flag_propolice_protection && TREE_CODE (copy) == VAR_DECL)
436+ DECL_INLINE (copy) = 1;
a89a5183 437+
438 /* Set the context for the new declaration. */
439 if (!DECL_CONTEXT (decl))
440 /* Globals stay global. */
441@@ -1965,6 +1969,10 @@
442
443 seq = get_insns ();
444 end_sequence ();
445+#ifdef FRAME_GROWS_DOWNWARD
446+ if (flag_propolice_protection && GET_CODE (seq) == SET)
447+ RTX_INTEGRATED_P (SET_SRC (seq)) = 1;
448+#endif
449 emit_insn_after (seq, map->insns_at_start);
450 return temp;
451 }
452diff -urN gcc-3.3.1/gcc/libgcc-std.ver gcc-3.3.1-pp/gcc/libgcc-std.ver
453--- gcc-3.3.1/gcc/libgcc-std.ver 2003-07-13 21:25:09.000000000 +0000
454+++ gcc-3.3.1-pp/gcc/libgcc-std.ver 2003-09-12 13:40:28.000000000 +0000
455@@ -174,6 +174,10 @@
456 _Unwind_SjLj_RaiseException
457 _Unwind_SjLj_ForcedUnwind
458 _Unwind_SjLj_Resume
a89a5183 459+
460+ # stack smash handler symbols
461+ __guard
462+ __stack_smash_handler
463 }
464
465 %inherit GCC_3.3 GCC_3.0
466diff -urN gcc-3.3.1/gcc/libgcc2.c gcc-3.3.1-pp/gcc/libgcc2.c
467--- gcc-3.3.1/gcc/libgcc2.c 2002-10-23 10:47:24.000000000 +0000
468+++ gcc-3.3.1-pp/gcc/libgcc2.c 2003-09-12 13:40:28.000000000 +0000
469@@ -1993,3 +1993,102 @@
470 #endif /* NEED_ATEXIT */
471
472 #endif /* L_exit */
a89a5183 473+\f
474+#ifdef L_stack_smash_handler
475+#include <stdio.h>
476+#include <string.h>
477+#include <fcntl.h>
478+#include <unistd.h>
a89a5183 479+
480+#ifdef _POSIX_SOURCE
481+#include <signal.h>
482+#endif
483+
484+#if defined(HAVE_SYSLOG)
485+#include <sys/types.h>
486+#include <sys/socket.h>
487+#include <sys/un.h>
a89a5183 488+
489+#include <sys/syslog.h>
490+#ifndef _PATH_LOG
491+#define _PATH_LOG "/dev/log"
a89a5183 492+#endif
493+#endif
494+
495+long __guard[8] = {0,0,0,0,0,0,0,0};
496+static void __guard_setup (void) __attribute__ ((constructor)) ;
497+static void __guard_setup (void)
498+{
499+ int fd;
500+ if (__guard[0]!=0) return;
501+ fd = open ("/dev/urandom", 0);
502+ if (fd != -1) {
503+ ssize_t size = read (fd, (char*)&__guard, sizeof(__guard));
504+ close (fd) ;
505+ if (size == sizeof(__guard)) return;
506+ }
507+ /* If a random generator can't be used, the protector switches the guard
508+ to the "terminator canary" */
509+ ((char*)__guard)[0] = 0; ((char*)__guard)[1] = 0;
510+ ((char*)__guard)[2] = '\n'; ((char*)__guard)[3] = 255;
511+}
512+void __stack_smash_handler (char func[], int damaged ATTRIBUTE_UNUSED)
513+{
514+#if defined (__GNU_LIBRARY__)
515+ extern char * __progname;
a89a5183 516+#endif
517+ const char message[] = ": stack smashing attack in function ";
518+ int bufsz = 256, len;
519+ char buf[bufsz];
520+#if defined(HAVE_SYSLOG)
521+ int LogFile;
522+ struct sockaddr_un SyslogAddr; /* AF_UNIX address of local logger */
a89a5183 523+#endif
524+#ifdef _POSIX_SOURCE
525+ {
526+ sigset_t mask;
527+ sigfillset(&mask);
528+ sigdelset(&mask, SIGABRT); /* Block all signal handlers */
529+ sigprocmask(SIG_BLOCK, &mask, NULL); /* except SIGABRT */
530+ }
531+#endif
532+
533+ strcpy(buf, "<2>"); len=3; /* send LOG_CRIT */
534+#if defined (__GNU_LIBRARY__)
535+ strncat(buf, __progname, bufsz-len-1); len = strlen(buf);
a89a5183 536+#endif
537+ if (bufsz>len) {strncat(buf, message, bufsz-len-1); len = strlen(buf);}
538+ if (bufsz>len) {strncat(buf, func, bufsz-len-1); len = strlen(buf);}
a89a5183 539+
540+ /* print error message */
541+ write (STDERR_FILENO, buf+3, len-3);
542+#if defined(HAVE_SYSLOG)
543+ if ((LogFile = socket(AF_UNIX, SOCK_DGRAM, 0)) != -1) {
a89a5183 544+
545+ /*
546+ * Send "found" message to the "/dev/log" path
547+ */
548+ SyslogAddr.sun_family = AF_UNIX;
549+ (void)strncpy(SyslogAddr.sun_path, _PATH_LOG,
550+ sizeof(SyslogAddr.sun_path) - 1);
551+ SyslogAddr.sun_path[sizeof(SyslogAddr.sun_path) - 1] = '\0';
552+ sendto(LogFile, buf, len, 0, (struct sockaddr *)&SyslogAddr,
553+ sizeof(SyslogAddr));
554+ }
555+#endif
556+
557+#ifdef _POSIX_SOURCE
558+ { /* Make sure the default handler is associated with SIGABRT */
559+ struct sigaction sa;
560+
561+ memset(&sa, 0, sizeof(struct sigaction));
562+ sigfillset(&sa.sa_mask); /* Block all signals */
563+ sa.sa_flags = 0;
564+ sa.sa_handler = SIG_DFL;
565+ sigaction(SIGABRT, &sa, NULL);
566+ (void)kill(getpid(), SIGABRT);
567+ }
a89a5183 568+#endif
abef1cdf 569+ _exit(127);
a89a5183 570+}
571+#endif
572diff -urN gcc-3.3.1/gcc/loop.c gcc-3.3.1-pp/gcc/loop.c
573--- gcc-3.3.1/gcc/loop.c 2003-07-11 06:47:05.000000000 +0000
574+++ gcc-3.3.1-pp/gcc/loop.c 2003-09-12 13:40:28.000000000 +0000
575@@ -6516,6 +6516,14 @@
576 if (GET_CODE (*mult_val) == USE)
577 *mult_val = XEXP (*mult_val, 0);
578
579+#ifndef FRAME_GROWS_DOWNWARD
580+ if (flag_propolice_protection
581+ && GET_CODE (*add_val) == PLUS
582+ && (XEXP (*add_val, 0) == frame_pointer_rtx
583+ || XEXP (*add_val, 1) == frame_pointer_rtx))
a89a5183 584+ return 0;
abef1cdf 585+#endif
a89a5183 586+
587 if (is_addr)
588 *pbenefit += address_cost (orig_x, addr_mode) - reg_address_cost;
589 else
590diff -urN gcc-3.3.1/gcc/optabs.c gcc-3.3.1-pp/gcc/optabs.c
591--- gcc-3.3.1/gcc/optabs.c 2003-07-19 00:25:25.000000000 +0000
592+++ gcc-3.3.1-pp/gcc/optabs.c 2003-09-12 13:40:28.000000000 +0000
593@@ -703,6 +703,26 @@
594 if (target)
595 target = protect_from_queue (target, 1);
596
597+ if (flag_propolice_protection
598+ && binoptab->code == PLUS
599+ && op0 == virtual_stack_vars_rtx
600+ && GET_CODE(op1) == CONST_INT)
a89a5183 601+ {
602+ int icode = (int) binoptab->handlers[(int) mode].insn_code;
603+ if (target)
604+ temp = target;
605+ else
606+ temp = gen_reg_rtx (mode);
a89a5183 607+
608+ if (! (*insn_data[icode].operand[0].predicate) (temp, mode)
609+ || GET_CODE (temp) != REG)
610+ temp = gen_reg_rtx (mode);
a89a5183 611+
612+ emit_insn (gen_rtx_SET (VOIDmode, temp,
613+ gen_rtx_PLUS (GET_MODE (op0), op0, op1)));
614+ return temp;
615+ }
a89a5183 616+
617 if (flag_force_mem)
618 {
619 op0 = force_not_mem (op0);
620diff -urN gcc-3.3.1/gcc/protector.c gcc-3.3.1-pp/gcc/protector.c
a89a5183 621--- gcc-3.3.1/gcc/protector.c 1970-01-01 00:00:00.000000000 +0000
abef1cdf 622+++ gcc-3.3.1-pp/gcc/protector.c 2003-09-12 13:40:28.000000000 +0000
623@@ -0,0 +1,2489 @@
624+/* RTL buffer overflow protection function for GNU C compiler
625+ Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.
626+
627+This file is part of GCC.
628+
629+GCC is free software; you can redistribute it and/or modify it under
630+the terms of the GNU General Public License as published by the Free
631+Software Foundation; either version 2, or (at your option) any later
632+version.
633+
634+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
635+WARRANTY; without even the implied warranty of MERCHANTABILITY or
636+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
637+for more details.
638+
639+You should have received a copy of the GNU General Public License
640+along with GCC; see the file COPYING. If not, write to the Free
641+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
642+02111-1307, USA. */
643+
644+#include "config.h"
645+#include "system.h"
646+#include "machmode.h"
647+
648+#include "rtl.h"
649+#include "tree.h"
650+#include "regs.h"
651+#include "flags.h"
652+#include "insn-config.h"
653+#include "insn-flags.h"
654+#include "expr.h"
655+#include "output.h"
656+#include "recog.h"
657+#include "hard-reg-set.h"
658+#include "real.h"
659+#include "except.h"
660+#include "function.h"
661+#include "toplev.h"
662+#include "conditions.h"
663+#include "insn-attr.h"
664+#include "c-tree.h"
665+#include "optabs.h"
666+#include "protector.h"
667+
668+
669+void prepare_stack_protection PARAMS ((int inlinable));
670+int search_string_def PARAMS ((tree names));
671+rtx assign_stack_local_for_pseudo_reg PARAMS ((enum machine_mode, HOST_WIDE_INT, int));
672+
673+
674+/* Warn when not issuing stack smashing protection for some reason */
675+int warn_stack_protector;
676+
677+/* Round a value to the lowest integer less than it that is a multiple of
678+ the required alignment. Avoid using division in case the value is
679+ negative. Assume the alignment is a power of two. */
680+#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
681+
682+/* Similar, but round to the next highest integer that meets the
683+ alignment. */
684+#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
685+
686+
687+/* Nonzero means use propolice as a stack protection method */
688+extern int flag_propolice_protection;
689+
690+/* This file contains several memory arrangement functions to protect
691+ the return address and the frame pointer of the stack
692+ from a stack-smashing attack. It also
693+ provides the function that protects pointer variables. */
694+
695+/* Nonzero if function being compiled can define string buffers that may be
696+ damaged by the stack-smash attack */
697+static int current_function_defines_vulnerable_string;
698+static int current_function_defines_short_string;
699+static int current_function_has_variable_string;
700+static int current_function_defines_vsized_array;
701+static int current_function_is_inlinable;
702+
703+static rtx guard_area, _guard;
704+static rtx function_first_insn, prologue_insert_point;
705+
706+/* */
707+static HOST_WIDE_INT sweep_frame_offset;
708+static HOST_WIDE_INT push_allocated_offset = 0;
709+static HOST_WIDE_INT push_frame_offset = 0;
710+static int saved_cse_not_expected = 0;
711+
712+static int search_string_from_argsandvars PARAMS ((int caller));
713+static int search_string_from_local_vars PARAMS ((tree block));
714+static int search_pointer_def PARAMS ((tree names));
715+static int search_func_pointer PARAMS ((tree type, int mark));
716+static void reset_used_flags_for_insns PARAMS ((rtx insn));
717+static void reset_used_flags_for_decls PARAMS ((tree block));
718+static void reset_used_flags_of_plus PARAMS ((rtx x));
719+static void rtl_prologue PARAMS ((rtx insn));
720+static void rtl_epilogue PARAMS ((rtx fnlastinsn));
721+static void arrange_var_order PARAMS ((tree blocks));
722+static void copy_args_for_protection PARAMS ((void));
723+static void sweep_string_variable PARAMS ((rtx sweep_var, HOST_WIDE_INT var_size));
724+static void sweep_string_in_decls PARAMS ((tree block, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
725+static void sweep_string_in_args PARAMS ((tree parms, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
726+static void sweep_string_use_of_insns PARAMS ((rtx insn, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
727+static void sweep_string_in_operand PARAMS ((rtx insn, rtx *loc, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
728+static void move_arg_location PARAMS ((rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size));
729+static void change_arg_use_of_insns PARAMS ((rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size));
730+static void change_arg_use_of_insns_2 PARAMS ((rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size));
731+static void change_arg_use_in_operand PARAMS ((rtx x, rtx orig, rtx *new, HOST_WIDE_INT size));
732+static void validate_insns_of_varrefs PARAMS ((rtx insn));
733+static void validate_operand_of_varrefs PARAMS ((rtx insn, rtx *loc));
734+
735+#define SUSPICIOUS_BUF_SIZE 8
736+
737+#define AUTO_BASEPTR(X) \
738+ (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
739+#define AUTO_OFFSET(X) \
740+ (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
741+#undef PARM_PASSED_IN_MEMORY
742+#define PARM_PASSED_IN_MEMORY(PARM) \
743+ (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
744+#define VIRTUAL_STACK_VARS_P(X) \
745+ ((X) == virtual_stack_vars_rtx || (GET_CODE (X) == REG && (X)->used))
746+
747+
748+
749+void
750+prepare_stack_protection (inlinable)
751+ int inlinable;
752+{
753+ tree blocks = DECL_INITIAL (current_function_decl);
754+ current_function_is_inlinable = inlinable && !flag_no_inline;
755+ push_frame_offset = push_allocated_offset = 0;
756+ saved_cse_not_expected = 0;
757+
758+ /*
759+ skip the protection if the function has no block or it is an inline function
760+ */
761+ if (current_function_is_inlinable) validate_insns_of_varrefs (get_insns ());
762+ if (! blocks || current_function_is_inlinable) return;
763+
764+ current_function_defines_vulnerable_string = search_string_from_argsandvars (0);
765+
766+ if (current_function_defines_vulnerable_string)
767+ {
768+ HOST_WIDE_INT offset;
769+ function_first_insn = get_insns ();
770+
771+ if (current_function_contains_functions) {
772+ if (warn_stack_protector)
773+ warning ("not protecting function: it contains functions");
774+ return;
775+ }
776+
777+ /* Initialize recognition, indicating that volatile is OK. */
778+ init_recog ();
779+
780+ sweep_frame_offset = 0;
781+
782+#ifdef STACK_GROWS_DOWNWARD
783+ /*
784+ frame_offset: offset to end of allocated area of stack frame.
785+ It is defined in the function.c
786+ */
787+
788+ /* the location must be before buffers */
789+ guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
790+ PUT_MODE (guard_area, GUARD_m);
791+ MEM_VOLATILE_P (guard_area) = 1;
792+
793+#ifndef FRAME_GROWS_DOWNWARD
794+ sweep_frame_offset = frame_offset;
795+#endif
796+
797+ /* For making room for guard value, scan all insns and fix the offset address
798+ of the variable that is based on frame pointer.
799+ Scan all declarations of variables and fix the offset address of the variable that
800+ is based on the frame pointer */
801+ sweep_string_variable (guard_area, UNITS_PER_GUARD);
802+
803+
804+ /* the location of guard area moves to the beginning of stack frame */
805+ if ((offset = AUTO_OFFSET(XEXP (guard_area, 0))))
806+ XEXP (XEXP (guard_area, 0), 1) = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
807+
808+
809+ /* Insert prologue rtl instructions */
810+ rtl_prologue (function_first_insn);
811+
812+ if (! current_function_has_variable_string)
813+ {
814+ /* Generate argument saving instruction */
815+ copy_args_for_protection ();
816+
817+#ifndef FRAME_GROWS_DOWNWARD
818+ /* If frame grows upward, character string copied from an arg stays top of
819+ the guard variable. So sweep the guard variable again */
820+ sweep_frame_offset = CEIL_ROUND (frame_offset, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
821+ sweep_string_variable (guard_area, UNITS_PER_GUARD);
822+#endif
823+ }
824+ else if (warn_stack_protector)
825+ warning ("not protecting variables: it has a variable length buffer");
826+#endif
827+#ifndef FRAME_GROWS_DOWNWARD
828+ if (STARTING_FRAME_OFFSET == 0)
829+ {
830+ /* this may be only for alpha */
831+ push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
832+ assign_stack_local (BLKmode, push_allocated_offset, -1);
833+ sweep_frame_offset = frame_offset;
834+ sweep_string_variable (const0_rtx, -push_allocated_offset);
835+ sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
836+ }
837+#endif
838+
839+ /* Arrange the order of local variables */
840+ arrange_var_order (blocks);
841+
842+#ifdef STACK_GROWS_DOWNWARD
843+ /* Insert epilogue rtl instructions */
844+ rtl_epilogue (get_last_insn ());
845+#endif
846+ init_recog_no_volatile ();
847+ }
848+ else if (current_function_defines_short_string
849+ && warn_stack_protector)
850+ warning ("not protecting function: buffer is less than %d bytes long",
851+ SUSPICIOUS_BUF_SIZE);
852+}
853+
854+/*
855+ search string from arguments and local variables
856+ caller: 0 means call from protector_stack_protection
857+ 1 means call from push_frame
858+*/
859+static int
860+search_string_from_argsandvars (caller)
861+ int caller;
862+{
863+ tree blocks, parms;
864+ int string_p;
865+
 866+ /* saves the latest search result as cached information */
867+ static tree __latest_search_decl = 0;
868+ static int __latest_search_result = FALSE;
869+
870+ if (__latest_search_decl == current_function_decl)
871+ return __latest_search_result;
872+ else if (caller) return FALSE;
873+ __latest_search_decl = current_function_decl;
874+ __latest_search_result = TRUE;
875+
876+ current_function_defines_short_string = FALSE;
877+ current_function_has_variable_string = FALSE;
878+ current_function_defines_vsized_array = FALSE;
879+
880+ /*
881+ search a string variable from local variables
882+ */
883+ blocks = DECL_INITIAL (current_function_decl);
884+ string_p = search_string_from_local_vars (blocks);
885+
886+ if (!current_function_defines_vsized_array && current_function_calls_alloca)
887+ {
888+ current_function_has_variable_string = TRUE;
889+ return TRUE;
890+ }
891+
892+ if (string_p) return TRUE;
893+
894+#ifdef STACK_GROWS_DOWNWARD
895+ /*
896+ search a string variable from arguments
897+ */
898+ parms = DECL_ARGUMENTS (current_function_decl);
899+
900+ for (; parms; parms = TREE_CHAIN (parms))
901+ if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
902+ {
903+ if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
904+ {
905+ string_p = search_string_def (TREE_TYPE(parms));
906+ if (string_p) return TRUE;
907+ }
908+ }
909+#endif
910+
911+ __latest_search_result = FALSE;
912+ return FALSE;
913+}
914+
915+
916+static int
917+search_string_from_local_vars (block)
918+ tree block;
919+{
920+ tree types;
921+ int found = FALSE;
922+
923+ while (block && TREE_CODE(block)==BLOCK)
924+ {
925+ types = BLOCK_VARS(block);
926+
927+ while (types)
928+ {
929+ /* skip the declaration that refers an external variable */
930+ /* name: types.decl.name.identifier.id */
931+ if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
932+ && TREE_CODE (types) == VAR_DECL
933+ && ! DECL_ARTIFICIAL (types)
934+ && DECL_RTL_SET_P (types)
935+ && GET_CODE (DECL_RTL (types)) == MEM)
936+ {
937+ if (search_string_def (TREE_TYPE (types)))
938+ {
939+ rtx home = DECL_RTL (types);
940+
941+ if (GET_CODE (home) == MEM
942+ && (GET_CODE (XEXP (home, 0)) == MEM
943+ || (GET_CODE (XEXP (home, 0)) == REG
944+ && XEXP (home, 0) != virtual_stack_vars_rtx
945+ && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
946+ && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
947+#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
948+ && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
949+#endif
950+ )))
951+ /* If the value is indirect by memory or by a register
952+ that isn't the frame pointer
 953+ then the object is variable-sized and addressed through
 954+ that register or stack slot. The protection has no way to hide pointer variables
 955+ behind the array, so all we can do is keep the order of variables and arguments. */
956+ {
957+ current_function_has_variable_string = TRUE;
958+ }
959+
960+ /* found character array */
961+ found = TRUE;
962+ }
963+ }
964+
965+ types = TREE_CHAIN(types);
966+ }
967+
968+ if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
969+ {
970+ found = TRUE;
971+ }
972+
973+ block = BLOCK_CHAIN (block);
974+ }
975+
976+ return found;
977+}
978+
979+
980+/*
981+ * search a character array from the specified type tree
982+ */
983+int
984+search_string_def (type)
985+ tree type;
986+{
987+ tree tem;
988+
989+ if (! type)
990+ return FALSE;
991+
992+ switch (TREE_CODE (type))
993+ {
994+ case ARRAY_TYPE:
995+ /* Check if the array is a variable-sized array */
996+ if (TYPE_DOMAIN (type) == 0
997+ || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
998+ && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
999+ current_function_defines_vsized_array = TRUE;
1000+
1001+ /* TREE_CODE( TREE_TYPE(type) ) == INTEGER_TYPE */
1002+ if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
1003+ || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
1004+ || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
1005+ {
1006+ /* Check if the string is a variable string */
1007+ if (TYPE_DOMAIN (type) == 0
1008+ || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1009+ && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
1010+ return TRUE;
1011+
1012+ /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE */
1013+ if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1014+ && TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1 >= SUSPICIOUS_BUF_SIZE)
1015+ return TRUE;
1016+
1017+ current_function_defines_short_string = TRUE;
1018+ }
1019+ return search_string_def(TREE_TYPE(type));
1020+
1021+ case UNION_TYPE:
1022+ case QUAL_UNION_TYPE:
1023+ case RECORD_TYPE:
1024+ /* Output the name, type, position (in bits), size (in bits) of each
1025+ field. */
1026+ for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1027+ {
1028+ /* Omit here local type decls until we know how to support them. */
1029+ if ((TREE_CODE (tem) == TYPE_DECL)
1030+ || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
1031+ continue;
1032+
1033+ if (search_string_def(TREE_TYPE(tem))) return TRUE;
1034+ }
1035+ break;
1036+
1037+ case POINTER_TYPE:
1038+ case REFERENCE_TYPE:
1039+ /* I'm not sure whether OFFSET_TYPE needs this treatment,
1040+ so I'll play safe and return 1. */
1041+ case OFFSET_TYPE:
1042+ default:
1043+ break;
1044+ }
1045+
1046+ return FALSE;
1047+}
1048+
1049+/*
1050+ * examine whether the input contains frame pointer addressing
1051+ */
1052+int
1053+contains_fp (op)
1054+ rtx op;
1055+{
1056+ register enum rtx_code code;
1057+ rtx x;
1058+ int i, j;
1059+ const char *fmt;
1060+
1061+ x = op;
1062+ if (x == 0)
1063+ return FALSE;
1064+
1065+ code = GET_CODE (x);
1066+
1067+ switch (code)
1068+ {
1069+ case CONST_INT:
1070+ case CONST_DOUBLE:
1071+ case CONST:
1072+ case SYMBOL_REF:
1073+ case CODE_LABEL:
1074+ case REG:
1075+ case ADDRESSOF:
1076+ return FALSE;
1077+
1078+ case PLUS:
1079+ if (XEXP (x, 0) == virtual_stack_vars_rtx
1080+ && CONSTANT_P (XEXP (x, 1)))
1081+ return TRUE;
1082+
1083+ default:
1084+ break;
1085+ }
1086+
1087+ /* Scan all subexpressions. */
1088+ fmt = GET_RTX_FORMAT (code);
1089+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1090+ if (*fmt == 'e')
1091+ {
1092+ if (contains_fp (XEXP (x, i))) return TRUE;
1093+ }
1094+ else if (*fmt == 'E')
1095+ for (j = 0; j < XVECLEN (x, i); j++)
1096+ if (contains_fp (XVECEXP (x, i, j))) return TRUE;
1097+
1098+ return FALSE;
1099+}
1100+
1101+
1102+static int
1103+search_pointer_def (type)
1104+ tree type;
1105+{
1106+ tree tem;
1107+
1108+ if (! type)
1109+ return FALSE;
1110+
1111+ switch (TREE_CODE (type))
1112+ {
1113+ case UNION_TYPE:
1114+ case QUAL_UNION_TYPE:
1115+ case RECORD_TYPE:
1116+ /* Output the name, type, position (in bits), size (in bits) of each
1117+ field. */
1118+ for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1119+ {
1120+ /* Omit here local type decls until we know how to support them. */
1121+ if ((TREE_CODE (tem) == TYPE_DECL)
1122+ || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
1123+ continue;
1124+
1125+ if (search_pointer_def (TREE_TYPE(tem))) return TRUE;
1126+ }
1127+ break;
1128+
1129+ case ARRAY_TYPE:
1130+ return search_pointer_def (TREE_TYPE(type));
1131+
1132+ case POINTER_TYPE:
1133+ case REFERENCE_TYPE:
1134+ /* I'm not sure whether OFFSET_TYPE needs this treatment,
1135+ so I'll play safe and return 1. */
1136+ case OFFSET_TYPE:
1137+ if (TYPE_READONLY (TREE_TYPE (type)))
1138+ {
1139+ int funcp = search_func_pointer (TREE_TYPE (type), 1);
1140+ /* Un-mark the type as having been visited already */
1141+ search_func_pointer (TREE_TYPE (type), 0);
1142+ return funcp;
1143+ }
1144+ return TRUE;
1145+
1146+ default:
1147+ break;
1148+ }
1149+
1150+ return FALSE;
1151+}
1152+
1153+
1154+static int
1155+search_func_pointer (type, mark)
1156+ tree type;
1157+ int mark;
1158+{
1159+ tree tem;
1160+
1161+ if (! type)
1162+ return FALSE;
1163+
1164+ switch (TREE_CODE (type))
1165+ {
1166+ case UNION_TYPE:
1167+ case QUAL_UNION_TYPE:
1168+ case RECORD_TYPE:
1169+ if (TREE_ASM_WRITTEN (type) != mark)
1170+ {
1171+ /* mark the type as having been visited already */
1172+ TREE_ASM_WRITTEN (type) = mark;
1173+
1174+ /* Output the name, type, position (in bits), size (in bits) of
1175+ each field. */
1176+ for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1177+ {
1178+ /* Omit here local type decls until we know how to support them. */
1179+ if (TREE_CODE (tem) == FIELD_DECL
1180+ && search_func_pointer (TREE_TYPE(tem), mark)) return TRUE;
1181+ }
1182+ }
1183+ break;
1184+
1185+ case ARRAY_TYPE:
1186+ return search_func_pointer (TREE_TYPE(type), mark);
1187+
1188+ case POINTER_TYPE:
1189+ case REFERENCE_TYPE:
1190+ /* I'm not sure whether OFFSET_TYPE needs this treatment,
1191+ so I'll play safe and return 1. */
1192+ case OFFSET_TYPE:
1193+ return TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE;
1194+
1195+ default:
1196+ break;
1197+ }
1198+
1199+ return FALSE;
1200+}
1201+
1202+
1203+static void
1204+reset_used_flags_for_insns (insn)
1205+ rtx insn;
1206+{
1207+ register int i, j;
1208+ register enum rtx_code code;
1209+ register const char *format_ptr;
1210+
1211+ for (; insn; insn = NEXT_INSN (insn))
1212+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1213+ || GET_CODE (insn) == CALL_INSN)
1214+ {
1215+ code = GET_CODE (insn);
1216+ insn->used = 0;
1217+ format_ptr = GET_RTX_FORMAT (code);
1218+
1219+ for (i = 0; i < GET_RTX_LENGTH (code); i++)
1220+ {
1221+ switch (*format_ptr++) {
1222+ case 'e':
1223+ reset_used_flags_of_plus (XEXP (insn, i));
1224+ break;
1225+
1226+ case 'E':
1227+ for (j = 0; j < XVECLEN (insn, i); j++)
1228+ reset_used_flags_of_plus (XVECEXP (insn, i, j));
1229+ break;
1230+ }
1231+ }
1232+ }
1233+}
1234+
1235+static void
1236+reset_used_flags_for_decls (block)
1237+ tree block;
1238+{
1239+ tree types;
1240+ rtx home;
1241+
1242+ while (block && TREE_CODE(block)==BLOCK)
1243+ {
1244+ types = BLOCK_VARS(block);
1245+
1246+ while (types)
1247+ {
1248+ /* skip the declaration that refers an external variable and
1249+ also skip a global variable */
1250+ if (! DECL_EXTERNAL (types))
1251+ {
1252+ if (!DECL_RTL_SET_P (types)) goto next;
1253+ home = DECL_RTL (types);
1254+
1255+ if (GET_CODE (home) == MEM
1256+ && GET_CODE (XEXP (home, 0)) == PLUS
1257+ && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
1258+ {
1259+ XEXP (home, 0)->used = 0;
1260+ }
1261+ }
1262+ next:
1263+ types = TREE_CHAIN(types);
1264+ }
1265+
1266+ reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
1267+
1268+ block = BLOCK_CHAIN (block);
1269+ }
1270+}
1271+
1272+/* Clear the USED bits only of type PLUS in X */
1273+
1274+static void
1275+reset_used_flags_of_plus (x)
1276+ rtx x;
1277+{
1278+ register int i, j;
1279+ register enum rtx_code code;
1280+ register const char *format_ptr;
1281+
1282+ if (x == 0)
1283+ return;
1284+
1285+ code = GET_CODE (x);
1286+
1287+ /* These types may be freely shared so we needn't do any resetting
1288+ for them. */
1289+
1290+ switch (code)
1291+ {
1292+ case REG:
1293+ case QUEUED:
1294+ case CONST_INT:
1295+ case CONST_DOUBLE:
1296+ case SYMBOL_REF:
1297+ case CODE_LABEL:
1298+ case PC:
1299+ case CC0:
1300+ return;
1301+
1302+ case INSN:
1303+ case JUMP_INSN:
1304+ case CALL_INSN:
1305+ case NOTE:
1306+ case LABEL_REF:
1307+ case BARRIER:
1308+ /* The chain of insns is not being copied. */
1309+ return;
1310+
1311+ case PLUS:
1312+ x->used = 0;
1313+ break;
1314+
1315+ case CALL_PLACEHOLDER:
1316+ reset_used_flags_for_insns (XEXP (x, 0));
1317+ reset_used_flags_for_insns (XEXP (x, 1));
1318+ reset_used_flags_for_insns (XEXP (x, 2));
1319+ break;
1320+
1321+ default:
1322+ break;
1323+ }
1324+
1325+ format_ptr = GET_RTX_FORMAT (code);
1326+ for (i = 0; i < GET_RTX_LENGTH (code); i++)
1327+ {
1328+ switch (*format_ptr++)
1329+ {
1330+ case 'e':
1331+ reset_used_flags_of_plus (XEXP (x, i));
1332+ break;
1333+
1334+ case 'E':
1335+ for (j = 0; j < XVECLEN (x, i); j++)
1336+ reset_used_flags_of_plus (XVECEXP (x, i, j));
1337+ break;
1338+ }
1339+ }
1340+}
1341+
1342+
1343+static void
1344+rtl_prologue (insn)
1345+ rtx insn;
1346+{
1347+#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
1348+#undef HAS_INIT_SECTION
1349+#define HAS_INIT_SECTION
1350+#endif
1351+
1352+ rtx _val;
1353+
1354+ for (; insn; insn = NEXT_INSN (insn))
1355+ if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1356+ break;
1357+
1358+#if !defined (HAS_INIT_SECTION)
1359+ /* If this function is `main', skip a call to `__main'
1360+ to run guard instruments after global initializers, etc. */
1361+ if (DECL_NAME (current_function_decl)
1362+ && MAIN_NAME_P (DECL_NAME (current_function_decl))
1363+ && DECL_CONTEXT (current_function_decl) == NULL_TREE)
1364+ {
1365+ rtx fbinsn = insn;
1366+ for (; insn; insn = NEXT_INSN (insn))
1367+ if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
1368+ break;
1369+ if (insn == 0) insn = fbinsn;
1370+ }
1371+#endif
1372+
1373+ prologue_insert_point = NEXT_INSN (insn); /* mark the next insn of FUNCTION_BEG insn */
1374+
1375+ start_sequence ();
1376+
1377+ _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
1378+ emit_move_insn ( guard_area, _guard);
1379+
1380+ _val = get_insns ();
1381+ end_sequence ();
1382+
1383+ emit_insn_before (_val, prologue_insert_point);
1384+}
1385+
1386+static void
1387+rtl_epilogue (insn)
1388+ rtx insn;
1389+{
1390+ rtx if_false_label;
1391+ rtx _val;
1392+ rtx funcname;
1393+ tree funcstr;
1394+ int flag_have_return = FALSE;
1395+
1396+ start_sequence ();
1397+
1398+#ifdef HAVE_return
1399+ if (HAVE_return)
1400+ {
1401+ rtx insn;
1402+ return_label = gen_label_rtx ();
1403+
1404+ for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
1405+ if (GET_CODE (insn) == JUMP_INSN
1406+ && GET_CODE (PATTERN (insn)) == RETURN
1407+ && GET_MODE (PATTERN (insn)) == VOIDmode)
1408+ {
1409+ rtx pat = gen_rtx_SET (VOIDmode,
1410+ pc_rtx,
1411+ gen_rtx_LABEL_REF (VOIDmode,
1412+ return_label));
1413+ PATTERN (insn) = pat;
1414+ flag_have_return = TRUE;
1415+ }
1416+
1417+
1418+ emit_label (return_label);
1419+ }
1420+#endif
1421+
1422+ compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX); /* if (guard_area != _guard) */
1423+
1424+ if_false_label = gen_label_rtx (); /* { */
1425+ emit_jump_insn ( gen_beq(if_false_label));
1426+
1427+ /* generate string for the current function name */
1428+ funcstr = build_string (strlen(current_function_name)+1, current_function_name);
1429+ TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);/* = char_array_type_node;*/
1430+ funcname = output_constant_def (funcstr, 1);
1431+
1432+ emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__stack_smash_handler"),
1433+ 0, VOIDmode, 2,
1434+ XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
1435+
1436+ /* generate RTL to return from the current function */
1437+
1438+ emit_barrier (); /* } */
1439+ emit_label (if_false_label);
1440+
1441+ /* generate RTL to return from the current function */
1442+ if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
1443+ use_return_register ();
1444+
1445+#ifdef HAVE_return
1446+ if (HAVE_return && flag_have_return)
1447+ {
1448+ emit_jump_insn (gen_return ());
1449+ emit_barrier ();
1450+ }
1451+#endif
1452+
1453+ _val = get_insns ();
1454+ end_sequence ();
1455+
1456+ emit_insn_after (_val, insn);
1457+}
1458+
1459+
1460+static void
1461+arrange_var_order (block)
1462+ tree block;
1463+{
1464+ tree types;
1465+ HOST_WIDE_INT offset;
1466+
1467+ while (block && TREE_CODE(block)==BLOCK)
1468+ {
1469+ types = BLOCK_VARS (block);
1470+
1471+ while (types)
1472+ {
1473+ /* skip the declaration that refers an external variable */
1474+ /* name: types.decl.assembler_name.id */
1475+ if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
1476+ && TREE_CODE (types) == VAR_DECL
1477+ && ! DECL_ARTIFICIAL (types)
1478+ && ! DECL_INLINE (types) /* don't sweep inlined string */
1479+ && DECL_RTL_SET_P (types)
1480+ && GET_CODE (DECL_RTL (types)) == MEM)
1481+ {
1482+ if (search_string_def (TREE_TYPE (types)))
1483+ {
1484+ rtx home = DECL_RTL (types);
1485+
1486+ if (! (GET_CODE (home) == MEM
1487+ && (GET_CODE (XEXP (home, 0)) == MEM
1488+ || (GET_CODE (XEXP (home, 0)) == REG
1489+ && XEXP (home, 0) != virtual_stack_vars_rtx
1490+ && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
1491+ && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
1492+#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1493+ && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
1494+#endif
1495+ ))))
1496+ {
1497+ /* found a string variable */
1498+ HOST_WIDE_INT var_size =
1499+ ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
1500+ / BITS_PER_UNIT);
1501+
1502+ if (GET_MODE (DECL_RTL (types)) == BLKmode)
1503+ {
1504+ int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1505+ var_size = CEIL_ROUND (var_size, alignment);
1506+ }
1507+
1508+ /* skip the variable if it is top of the region
1509+ specified by sweep_frame_offset */
1510+ offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
1511+ if (offset == sweep_frame_offset - var_size)
1512+ sweep_frame_offset -= var_size;
1513+
1514+ else if (offset < sweep_frame_offset - var_size)
1515+ sweep_string_variable (DECL_RTL (types), var_size);
1516+ }
1517+ }
1518+ }
1519+
1520+ types = TREE_CHAIN(types);
1521+ }
1522+
1523+ arrange_var_order (BLOCK_SUBBLOCKS (block));
1524+
1525+ block = BLOCK_CHAIN (block);
1526+ }
1527+}
1528+
1529+
1530+static void
1531+copy_args_for_protection ()
1532+{
1533+ tree parms = DECL_ARGUMENTS (current_function_decl);
1534+ rtx temp_rtx;
1535+
1536+ parms = DECL_ARGUMENTS (current_function_decl);
1537+ for (; parms; parms = TREE_CHAIN (parms))
1538+ if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1539+ {
1540+ if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1541+ {
1542+ int string_p;
1543+
1544+ /*
1545+ skip argument protection if the last argument is used
1546+ for the variable argument
1547+ */
1548+ /*
1549+ tree fntype;
1550+ if (TREE_CHAIN (parms) == 0)
1551+ {
1552+ fntype = TREE_TYPE (current_function_decl);
1553+
1554+ if ((TYPE_ARG_TYPES (fntype) != 0 &&
1555+ TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) != void_type_node)
1556+ || current_function_varargs)
1557+ continue;
1558+ }
1559+ */
1560+
1561+ string_p = search_string_def (TREE_TYPE(parms));
1562+
1563+ /* check if it is a candidate to move */
1564+ if (string_p || search_pointer_def (TREE_TYPE (parms)))
1565+ {
1566+ int arg_size
1567+ = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
1568+ / BITS_PER_UNIT);
1569+
1570+ start_sequence ();
1571+
1572+ if (GET_CODE (DECL_RTL (parms)) == REG)
1573+ {
1574+ rtx safe = 0;
1575+
1576+ change_arg_use_of_insns (prologue_insert_point, DECL_RTL (parms), &safe, 0);
1577+ if (safe)
1578+ {
1579+ /* generate codes for copying the content */
1580+ rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
1581+
1582+ /* avoid register elimination in gcse.c (COPY-PROP)*/
1583+ PATTERN (movinsn)->volatil = 1;
1584+
1585+ /* save debugger info */
1586+ DECL_INCOMING_RTL (parms) = safe;
1587+ }
1588+ }
1589+
1590+ else if (GET_CODE (DECL_RTL (parms)) == MEM
1591+ && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
1592+ {
1593+ rtx movinsn;
1594+ rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
1595+
1596+ /* generate codes for copying the content */
1597+ movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
1598+ PATTERN (movinsn)->volatil = 1; /* avoid register elimination in gcse.c (COPY-PROP)*/
1599+
1600+ /* change the addressof information to the newly allocated pseudo register */
1601+ emit_move_insn (DECL_RTL (parms), safe);
1602+
1603+ /* save debugger info */
1604+ DECL_INCOMING_RTL (parms) = safe;
1605+ }
1606+
1607+ else
1608+ {
1609+ /* declare temporary local variable DECL_NAME (parms) for it */
1610+ temp_rtx
1611+ = assign_stack_local (DECL_MODE (parms), arg_size,
1612+ DECL_MODE (parms) == BLKmode ? -1 : 0);
1613+
1614+ MEM_IN_STRUCT_P (temp_rtx) = AGGREGATE_TYPE_P (TREE_TYPE (parms));
1615+ set_mem_alias_set (temp_rtx, get_alias_set (parms));
1616+
1617+ /* generate codes for copying the content */
1618+ store_expr (parms, temp_rtx, 0);
1619+
1620+ /* change the reference for each instructions */
1621+ move_arg_location (prologue_insert_point, DECL_RTL (parms),
1622+ temp_rtx, arg_size);
1623+
1624+ /* change the location of parms variable */
1625+ SET_DECL_RTL (parms, temp_rtx);
1626+
1627+ /* change debugger info */
1628+ DECL_INCOMING_RTL (parms) = temp_rtx;
1629+ }
1630+
1631+ emit_insn_before (get_insns (), prologue_insert_point);
1632+ end_sequence ();
1633+
1634+#ifdef FRAME_GROWS_DOWNWARD
1635+ /* process the string argument */
1636+ if (string_p && DECL_MODE (parms) == BLKmode)
1637+ {
1638+ int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1639+ arg_size = CEIL_ROUND (arg_size, alignment);
1640+
1641+ /* change the reference for each instructions */
1642+ sweep_string_variable (DECL_RTL (parms), arg_size);
1643+ }
1644+#endif
1645+ }
1646+ }
1647+ }
1648+}
1649+
1650+
1651+/*
1652+ sweep a string variable to the local variable addressed by sweep_frame_offset, that is
1653+ a last position of string variables.
1654+*/
1655+static void
1656+sweep_string_variable (sweep_var, var_size)
1657+ rtx sweep_var;
1658+ HOST_WIDE_INT var_size;
1659+{
1660+ HOST_WIDE_INT sweep_offset;
1661+
1662+ switch (GET_CODE (sweep_var))
1663+ {
1664+ case MEM:
1665+ if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
1666+ && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
1667+ return;
1668+ sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
1669+ break;
1670+ case CONST_INT:
1671+ sweep_offset = INTVAL (sweep_var);
1672+ break;
1673+ default:
1674+ abort ();
1675+ }
1676+
1677+ /* scan all declarations of variables and fix the offset address of
1678+ the variable based on the frame pointer */
1679+ sweep_string_in_decls (DECL_INITIAL (current_function_decl), sweep_offset, var_size);
1680+
1681+ /* scan all argument variable and fix the offset address based on the frame pointer */
1682+ sweep_string_in_args (DECL_ARGUMENTS (current_function_decl), sweep_offset, var_size);
1683+
1684+ /* For making room for sweep variable, scan all insns and fix the offset address
1685+ of the variable that is based on frame pointer*/
1686+ sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
1687+
1688+
1689+ /* Clear all the USED bits in operands of all insns and declarations of local vars */
1690+ reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
1691+ reset_used_flags_for_insns (function_first_insn);
1692+
1693+ sweep_frame_offset -= var_size;
1694+}
1695+
1696+
1697+
1698+/*
1699+ move an argument to the local variable addressed by frame_offset
1700+*/
1701+static void
1702+move_arg_location (insn, orig, new, var_size)
1703+ rtx insn, orig, new;
1704+ HOST_WIDE_INT var_size;
1705+{
1706+ /* For making room for sweep variable, scan all insns and fix the offset address
1707+ of the variable that is based on frame pointer*/
1708+ change_arg_use_of_insns (insn, orig, &new, var_size);
1709+
1710+
1711+ /* Clear all the USED bits in operands of all insns and declarations of local vars */
1712+ reset_used_flags_for_insns (insn);
1713+}
1714+
1715+
1716+static void
1717+sweep_string_in_decls (block, sweep_offset, sweep_size)
1718+ tree block;
1719+ HOST_WIDE_INT sweep_offset, sweep_size;
1720+{
1721+ tree types;
1722+ HOST_WIDE_INT offset;
1723+ rtx home;
1724+
1725+ while (block && TREE_CODE(block)==BLOCK)
1726+ {
1727+ types = BLOCK_VARS(block);
1728+
1729+ while (types)
1730+ {
1731+	  /* skip declarations that refer to an external variable and
1732+	     also skip global variables */
1733+ if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
1734+
1735+ if (!DECL_RTL_SET_P (types)) goto next;
1736+ home = DECL_RTL (types);
1737+
1738+ /* process for static local variable */
1739+ if (GET_CODE (home) == MEM
1740+ && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
1741+ goto next;
1742+
1743+ if (GET_CODE (home) == MEM
1744+ && XEXP (home, 0) == virtual_stack_vars_rtx)
1745+ {
1746+ offset = 0;
1747+
1748+ /* the operand related to the sweep variable */
1749+ if (sweep_offset <= offset
1750+ && offset < sweep_offset + sweep_size)
1751+ {
1752+ offset = sweep_frame_offset - sweep_size - sweep_offset;
1753+
1754+ XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx, offset);
1755+ XEXP (home, 0)->used = 1;
1756+ }
1757+ else if (sweep_offset <= offset
1758+ && offset < sweep_frame_offset)
1759+ { /* the rest of variables under sweep_frame_offset, so shift the location */
1760+ XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx, -sweep_size);
1761+ XEXP (home, 0)->used = 1;
1762+ }
1763+ }
1764+
1765+ if (GET_CODE (home) == MEM
1766+ && GET_CODE (XEXP (home, 0)) == MEM)
1767+ {
1768+		/* process for a dynamically allocated array */
1769+ home = XEXP (home, 0);
1770+ }
1771+
1772+ if (GET_CODE (home) == MEM
1773+ && GET_CODE (XEXP (home, 0)) == PLUS
1774+ && XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
1775+ && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
1776+ {
1777+ if (! XEXP (home, 0)->used)
1778+ {
1779+ offset = AUTO_OFFSET(XEXP (home, 0));
1780+
1781+ /* the operand related to the sweep variable */
1782+ if (sweep_offset <= offset
1783+ && offset < sweep_offset + sweep_size)
1784+ {
1785+
1786+ offset += sweep_frame_offset - sweep_size - sweep_offset;
1787+ XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1788+
1789+ /* mark */
1790+ XEXP (home, 0)->used = 1;
1791+ }
1792+ else if (sweep_offset <= offset
1793+ && offset < sweep_frame_offset)
1794+ { /* the rest of variables under sweep_frame_offset,
1795+ so shift the location */
1796+
1797+ XEXP (XEXP (home, 0), 1)
1798+ = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1799+
1800+ /* mark */
1801+ XEXP (home, 0)->used = 1;
1802+ }
1803+ }
1804+ }
1805+
1806+ }
1807+ next:
1808+ types = TREE_CHAIN(types);
1809+ }
1810+
1811+ sweep_string_in_decls (BLOCK_SUBBLOCKS (block), sweep_offset, sweep_size);
1812+ block = BLOCK_CHAIN (block);
1813+ }
1814+}
1815+
1816+
1817+static void
1818+sweep_string_in_args (parms, sweep_offset, sweep_size)
1819+ tree parms;
1820+ HOST_WIDE_INT sweep_offset, sweep_size;
1821+{
1822+ rtx home;
1823+ HOST_WIDE_INT offset;
1824+
1825+ for (; parms; parms = TREE_CHAIN (parms))
1826+ if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1827+ {
1828+ if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1829+ {
1830+ home = DECL_INCOMING_RTL (parms);
1831+
1832+ if (XEXP (home, 0)->used) continue;
1833+
1834+ offset = AUTO_OFFSET(XEXP (home, 0));
1835+
1836+ /* the operand related to the sweep variable */
1837+ if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
1838+ {
1839+ if (sweep_offset <= offset
1840+ && offset < sweep_offset + sweep_size)
1841+ {
1842+ offset += sweep_frame_offset - sweep_size - sweep_offset;
1843+ XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1844+
1845+ /* mark */
1846+ XEXP (home, 0)->used = 1;
1847+ }
1848+ else if (sweep_offset <= offset
1849+ && offset < sweep_frame_offset)
1850+ { /* the rest of variables under sweep_frame_offset, so shift the location */
1851+ XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1852+
1853+ /* mark */
1854+ XEXP (home, 0)->used = 1;
1855+ }
1856+ }
1857+ }
1858+ }
1859+}
1860+
1861+
1862+static int has_virtual_reg;
1863+
1864+static void
1865+sweep_string_use_of_insns (insn, sweep_offset, sweep_size)
1866+ rtx insn;
1867+ HOST_WIDE_INT sweep_offset, sweep_size;
1868+{
1869+ for (; insn; insn = NEXT_INSN (insn))
1870+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1871+ || GET_CODE (insn) == CALL_INSN)
1872+ {
1873+ has_virtual_reg = FALSE;
1874+ sweep_string_in_operand (insn, &PATTERN (insn), sweep_offset, sweep_size);
1875+ sweep_string_in_operand (insn, &REG_NOTES (insn), sweep_offset, sweep_size);
1876+ }
1877+}
1878+
1879+
1880+static void
1881+sweep_string_in_operand (insn, loc, sweep_offset, sweep_size)
1882+ rtx insn, *loc;
1883+ HOST_WIDE_INT sweep_offset, sweep_size;
1884+{
1885+ register rtx x = *loc;
1886+ register enum rtx_code code;
1887+ int i, j, k = 0;
1888+ HOST_WIDE_INT offset;
1889+ const char *fmt;
1890+
1891+ if (x == 0)
1892+ return;
1893+
1894+ code = GET_CODE (x);
1895+
1896+ switch (code)
1897+ {
1898+ case CONST_INT:
1899+ case CONST_DOUBLE:
1900+ case CONST:
1901+ case SYMBOL_REF:
1902+ case CODE_LABEL:
1903+ case PC:
1904+ case CC0:
1905+ case ASM_INPUT:
1906+ case ADDR_VEC:
1907+ case ADDR_DIFF_VEC:
1908+ case RETURN:
1909+ case ADDRESSOF:
1910+ return;
1911+
1912+ case REG:
1913+ if (x == virtual_incoming_args_rtx
1914+ || x == virtual_stack_vars_rtx
1915+ || x == virtual_stack_dynamic_rtx
1916+ || x == virtual_outgoing_args_rtx
1917+ || x == virtual_cfa_rtx)
1918+ has_virtual_reg = TRUE;
1919+ return;
1920+
1921+ case SET:
1922+ /*
1923+ skip setjmp setup insn and setjmp restore insn
1924+ Example:
1925+ (set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
1926+ (set (virtual_stack_vars_rtx) (REG))
1927+ */
1928+ if (GET_CODE (XEXP (x, 0)) == MEM
1929+ && XEXP (x, 1) == virtual_stack_vars_rtx)
1930+ return;
1931+ if (XEXP (x, 0) == virtual_stack_vars_rtx
1932+ && GET_CODE (XEXP (x, 1)) == REG)
1933+ return;
1934+ break;
1935+
1936+ case PLUS:
1937+ /* Handle typical case of frame register plus constant. */
1938+ if (XEXP (x, 0) == virtual_stack_vars_rtx
1939+ && CONSTANT_P (XEXP (x, 1)))
1940+ {
1941+ if (x->used) goto single_use_of_virtual_reg;
1942+
1943+ offset = AUTO_OFFSET(x);
1944+ if (RTX_INTEGRATED_P (x)) k = -1; /* for inline base ptr */
1945+
1946+ /* the operand related to the sweep variable */
1947+ if (sweep_offset <= offset + k
1948+ && offset + k < sweep_offset + sweep_size)
1949+ {
1950+ offset += sweep_frame_offset - sweep_size - sweep_offset;
1951+
1952+ XEXP (x, 0) = virtual_stack_vars_rtx;
1953+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1954+ x->used = 1;
1955+ }
1956+ else if (sweep_offset <= offset + k
1957+ && offset + k < sweep_frame_offset)
1958+ { /* the rest of variables under sweep_frame_offset, so shift the location */
1959+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1960+ x->used = 1;
1961+ }
1962+
1963+ single_use_of_virtual_reg:
1964+ if (has_virtual_reg) {
1965+ /* excerpt from insn_invalid_p in recog.c */
1966+ int icode = recog_memoized (insn);
1967+
1968+ if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
1969+ {
1970+ rtx temp, seq;
1971+
1972+ start_sequence ();
1973+ temp = force_operand (x, NULL_RTX);
1974+ seq = get_insns ();
1975+ end_sequence ();
1976+
1977+ emit_insn_before (seq, insn);
1978+ if (! validate_change (insn, loc, temp, 0)
1979+ && ! validate_replace_rtx (x, temp, insn))
1980+ fatal_insn ("sweep_string_in_operand", insn);
1981+ }
1982+ }
1983+
1984+ has_virtual_reg = TRUE;
1985+ return;
1986+ }
1987+
1988+#ifdef FRAME_GROWS_DOWNWARD
1989+ /*
1990+	reject the case of the frame register plus a constant given by a reg.
1991+ */
1992+ else if (XEXP (x, 0) == virtual_stack_vars_rtx
1993+ && GET_CODE (XEXP (x, 1)) == REG)
1994+ fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
1995+#endif
1996+
1997+ /*
1998+ process further subtree:
1999+ Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2000+ (const_int 5))
2001+ */
2002+ break;
2003+
2004+ case CALL_PLACEHOLDER:
2005+ sweep_string_use_of_insns (XEXP (x, 0), sweep_offset, sweep_size);
2006+ sweep_string_use_of_insns (XEXP (x, 1), sweep_offset, sweep_size);
2007+ sweep_string_use_of_insns (XEXP (x, 2), sweep_offset, sweep_size);
2008+ break;
2009+
2010+ default:
2011+ break;
2012+ }
2013+
2014+ /* Scan all subexpressions. */
2015+ fmt = GET_RTX_FORMAT (code);
2016+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2017+ if (*fmt == 'e')
2018+ {
2019+ /*
2020+ virtual_stack_vars_rtx without offset
2021+ Example:
2022+ (set (reg:SI xx) (reg:SI 78))
2023+ (set (reg:SI xx) (MEM (reg:SI 78)))
2024+ */
2025+ if (XEXP (x, i) == virtual_stack_vars_rtx)
2026+ fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
2027+ sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
2028+ }
2029+ else if (*fmt == 'E')
2030+ for (j = 0; j < XVECLEN (x, i); j++)
2031+ sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
2032+}
2033+
2034+
2035+/*
2036+  change an argument variable to the local variable addressed by the "new" variable.
2037+*/
2038+static int flag_caui_exit;
2039+
2040+static void
2041+change_arg_use_of_insns (insn, orig, new, size)
2042+ rtx insn, orig, *new;
2043+ HOST_WIDE_INT size;
2044+{
2045+ flag_caui_exit = FALSE;
2046+ change_arg_use_of_insns_2 (insn, orig, new, size);
2047+}
2048+
2049+static void
2050+change_arg_use_of_insns_2 (insn, orig, new, size)
2051+ rtx insn, orig, *new;
2052+ HOST_WIDE_INT size;
2053+{
2054+ for (; insn && !flag_caui_exit; insn = NEXT_INSN (insn))
2055+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2056+ || GET_CODE (insn) == CALL_INSN)
2057+ {
2058+ change_arg_use_in_operand (PATTERN (insn), orig, new, size);
2059+ }
2060+}
2061+
2062+
2063+
2064+static void
2065+change_arg_use_in_operand (x, orig, new, size)
2066+ rtx x, orig, *new;
2067+ HOST_WIDE_INT size;
2068+{
2069+ register enum rtx_code code;
2070+ int i, j;
2071+ HOST_WIDE_INT offset;
2072+ const char *fmt;
2073+
2074+ if (x == 0)
2075+ return;
2076+
2077+ code = GET_CODE (x);
2078+
2079+ switch (code)
2080+ {
2081+ case CONST_INT:
2082+ case CONST_DOUBLE:
2083+ case CONST:
2084+ case SYMBOL_REF:
2085+ case CODE_LABEL:
2086+ case PC:
2087+ case CC0:
2088+ case ASM_INPUT:
2089+ case ADDR_VEC:
2090+ case ADDR_DIFF_VEC:
2091+ case RETURN:
2092+ case REG:
2093+ case ADDRESSOF:
2094+ return;
2095+
2096+ case MEM:
2097+ /* Handle special case of MEM (incoming_args) */
2098+ if (GET_CODE (orig) == MEM
2099+ && XEXP (x, 0) == virtual_incoming_args_rtx)
2100+ {
2101+ offset = 0;
2102+
2103+ /* the operand related to the sweep variable */
2104+ if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2105+ offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
2106+
2107+ offset = AUTO_OFFSET(XEXP (*new, 0))
2108+ + (offset - AUTO_OFFSET(XEXP (orig, 0)));
2109+
2110+ XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
2111+ XEXP (x, 0)->used = 1;
2112+
2113+ return;
2114+ }
2115+ }
2116+ break;
2117+
2118+ case PLUS:
2119+ /* Handle special case of frame register plus constant. */
2120+ if (GET_CODE (orig) == MEM /* skip if orig is register variable in the optimization */
2121+ && XEXP (x, 0) == virtual_incoming_args_rtx && CONSTANT_P (XEXP (x, 1))
2122+ && ! x->used)
2123+ {
2124+ offset = AUTO_OFFSET(x);
2125+
2126+ /* the operand related to the sweep variable */
2127+ if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2128+ offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
2129+
2130+ offset = AUTO_OFFSET(XEXP (*new, 0))
2131+ + (offset - AUTO_OFFSET(XEXP (orig, 0)));
2132+
2133+ XEXP (x, 0) = virtual_stack_vars_rtx;
2134+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2135+ x->used = 1;
2136+
2137+ return;
2138+ }
2139+
2140+ /*
2141+ process further subtree:
2142+ Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2143+ (const_int 5))
2144+ */
2145+ }
2146+ break;
2147+
2148+ case SET:
2149+ /* Handle special case of "set (REG or MEM) (incoming_args)".
2150+	 It means that the address of the 1st argument is stored. */
2151+ if (GET_CODE (orig) == MEM
2152+ && XEXP (x, 1) == virtual_incoming_args_rtx)
2153+ {
2154+ offset = 0;
2155+
2156+ /* the operand related to the sweep variable */
2157+ if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2158+ offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
2159+
2160+ offset = AUTO_OFFSET(XEXP (*new, 0))
2161+ + (offset - AUTO_OFFSET(XEXP (orig, 0)));
2162+
2163+ XEXP (x, 1) = plus_constant (virtual_stack_vars_rtx, offset);
2164+ XEXP (x, 1)->used = 1;
2165+
2166+ return;
2167+ }
2168+ }
2169+ break;
2170+
2171+ case CALL_PLACEHOLDER:
2172+ change_arg_use_of_insns_2 (XEXP (x, 0), orig, new, size); if (flag_caui_exit) return;
2173+ change_arg_use_of_insns_2 (XEXP (x, 1), orig, new, size); if (flag_caui_exit) return;
2174+ change_arg_use_of_insns_2 (XEXP (x, 2), orig, new, size); if (flag_caui_exit) return;
2175+ break;
2176+
2177+ default:
2178+ break;
2179+ }
2180+
2181+ if (*new == 0
2182+ && code == SET
2183+ && SET_SRC (x) == orig
2184+ && GET_CODE (SET_DEST (x)) == REG)
2185+ {
2186+ /* exit to the change_arg_use_of_insns */
2187+ flag_caui_exit = TRUE;
2188+ x->volatil = 1; /* avoid register elimination in gcse.c (COPY-PROP)*/
2189+ return;
2190+ }
2191+
2192+ /* Scan all subexpressions. */
2193+ fmt = GET_RTX_FORMAT (code);
2194+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2195+ if (*fmt == 'e')
2196+ {
2197+ if (XEXP (x, i) == orig)
2198+ {
2199+ if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig));
2200+ XEXP (x, i) = *new;
2201+ continue;
2202+ }
2203+ change_arg_use_in_operand (XEXP (x, i), orig, new, size);
2204+ }
2205+ else if (*fmt == 'E')
2206+ for (j = 0; j < XVECLEN (x, i); j++)
2207+ {
2208+
2209+ if (XVECEXP (x, i, j) == orig)
2210+ {
2211+ if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig));
2212+ XVECEXP (x, i, j) = *new;
2213+ continue;
2214+ }
2215+ change_arg_use_in_operand (XVECEXP (x, i, j), orig, new, size);
2216+ }
2217+}
2218+
2219+
2220+static void
2221+validate_insns_of_varrefs (insn)
2222+ rtx insn;
2223+{
2224+ rtx next;
2225+
2226+ /* Initialize recognition, indicating that volatile is OK. */
2227+ init_recog ();
2228+
2229+ for (; insn; insn = next)
2230+ {
2231+ next = NEXT_INSN (insn);
2232+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2233+ || GET_CODE (insn) == CALL_INSN)
2234+ {
2235+ /* excerpt from insn_invalid_p in recog.c */
2236+ int icode = recog_memoized (insn);
2237+
2238+ if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
2239+ validate_operand_of_varrefs (insn, &PATTERN (insn));
2240+ }
2241+ }
2242+
2243+ init_recog_no_volatile ();
2244+}
2245+
2246+
2247+static void
2248+validate_operand_of_varrefs (insn, loc)
2249+ rtx insn, *loc;
2250+{
2251+ register enum rtx_code code;
2252+ rtx x, temp, seq;
2253+ int i, j;
2254+ const char *fmt;
2255+
2256+ x = *loc;
2257+ if (x == 0)
2258+ return;
2259+
2260+ code = GET_CODE (x);
2261+
2262+ switch (code)
2263+ {
2264+ case USE:
2265+ case CONST_INT:
2266+ case CONST_DOUBLE:
2267+ case CONST:
2268+ case SYMBOL_REF:
2269+ case CODE_LABEL:
2270+ case PC:
2271+ case CC0:
2272+ case ASM_INPUT:
2273+ case ADDR_VEC:
2274+ case ADDR_DIFF_VEC:
2275+ case RETURN:
2276+ case REG:
2277+ case ADDRESSOF:
2278+ return;
2279+
2280+ case PLUS:
2281+ /* validate insn of frame register plus constant. */
2282+ if (GET_CODE (x) == PLUS
2283+ && XEXP (x, 0) == virtual_stack_vars_rtx
2284+ && CONSTANT_P (XEXP (x, 1)))
2285+ {
2286+ start_sequence ();
2287+ /* temp = force_operand (x, NULL_RTX); */
2288+ { /* excerpt from expand_binop in optabs.c */
2289+ optab binoptab = add_optab;
2290+ enum machine_mode mode = GET_MODE (x);
2291+ int icode = (int) binoptab->handlers[(int) mode].insn_code;
2292+ enum machine_mode mode1 = insn_data[icode].operand[2].mode;
2293+ rtx pat;
2294+ rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
2295+ temp = gen_reg_rtx (mode);
2296+
2297+ /* Now, if insn's predicates don't allow offset operands, put them into
2298+ pseudo regs. */
2299+
2300+ if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
2301+ && mode1 != VOIDmode)
2302+ xop1 = copy_to_mode_reg (mode1, xop1);
2303+
2304+ pat = GEN_FCN (icode) (temp, xop0, xop1);
2305+ if (pat)
2306+ emit_insn (pat);
2307+ }
2308+ seq = get_insns ();
2309+ end_sequence ();
2310+
2311+ emit_insn_before (seq, insn);
2312+ if (! validate_change (insn, loc, temp, 0))
2313+ abort ();
2314+ return;
2315+ }
2316+ break;
2317+
2318+
2319+ case CALL_PLACEHOLDER:
2320+ validate_insns_of_varrefs (XEXP (x, 0));
2321+ validate_insns_of_varrefs (XEXP (x, 1));
2322+ validate_insns_of_varrefs (XEXP (x, 2));
2323+ break;
2324+
2325+ default:
2326+ break;
2327+ }
2328+
2329+ /* Scan all subexpressions. */
2330+ fmt = GET_RTX_FORMAT (code);
2331+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2332+ if (*fmt == 'e')
2333+ validate_operand_of_varrefs (insn, &XEXP (x, i));
2334+ else if (*fmt == 'E')
2335+ for (j = 0; j < XVECLEN (x, i); j++)
2336+ validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
2337+}
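
validate_insns_of_varrefs re-recognises every insn once the offsets have been rewritten, and where a (plus frame-pointer const) operand is no longer accepted it loads the sum into a pseudo by expanding the addition through add_optab by hand. The commented-out force_operand call above marks the generic way of doing the same thing; a sketch of that form, assuming it were safe to rely on here (which the patch evidently chooses not to):

    /* Sketch of the generic alternative to the open-coded add_optab
       expansion above; the patch keeps the force_operand call commented
       out and expands the addition by hand instead.  */
    static rtx
    sketch_force_fp_plus_const (rtx x)
    {
      return force_operand (x, NULL_RTX);
    }
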
2338+
2339+
2340+
2341+
2342+/*
2343+  The following code is invoked after the instantiation of pseudo registers.
2344+
2345+  Reorder local variables to place a pseudo register after buffers to avoid
2346+ the corruption of local variables that could be used to further corrupt
2347+ arbitrary memory locations.
2348+*/
2349+#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2350+static void push_frame PARAMS ((HOST_WIDE_INT var_size, HOST_WIDE_INT boundary));
2351+static void push_frame_in_decls PARAMS ((tree block, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2352+static void push_frame_in_args PARAMS ((tree parms, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2353+static void push_frame_of_insns PARAMS ((rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2354+static void push_frame_in_operand PARAMS ((rtx insn, rtx orig, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2355+static void push_frame_of_reg_equiv_memory_loc PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2356+static void push_frame_of_reg_equiv_constant PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2357+static void reset_used_flags_for_push_frame PARAMS ((void));
2358+static int check_out_of_frame_access PARAMS ((rtx insn, HOST_WIDE_INT boundary));
2359+static int check_out_of_frame_access_in_operand PARAMS ((rtx, HOST_WIDE_INT boundary));
2360+#endif
2361+
2362+rtx
2363+assign_stack_local_for_pseudo_reg (mode, size, align)
2364+ enum machine_mode mode;
2365+ HOST_WIDE_INT size;
2366+ int align;
2367+{
2368+#if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
2369+ return assign_stack_local (mode, size, align);
2370+#else
2371+ tree blocks = DECL_INITIAL (current_function_decl);
2372+ rtx new;
2373+ HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
2374+ int first_call_from_purge_addressof, first_call_from_global_alloc;
2375+
2376+ if (! flag_propolice_protection
2377+ || size == 0
2378+ || ! blocks
2379+ || current_function_is_inlinable
2380+ || ! search_string_from_argsandvars (1)
2381+ || current_function_contains_functions)
2382+ return assign_stack_local (mode, size, align);
2383+
2384+ first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
2385+ first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
2386+ saved_cse_not_expected = cse_not_expected;
2387+
2388+ starting_frame = (STARTING_FRAME_OFFSET)?STARTING_FRAME_OFFSET:BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2389+ units_per_push = MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2390+ GET_MODE_SIZE (mode));
2391+
2392+ if (first_call_from_purge_addressof)
2393+ {
2394+ push_frame_offset = push_allocated_offset;
2395+ if (check_out_of_frame_access (get_insns (), starting_frame))
2396+ {
2397+	  /* if there is an access beyond the frame, push a dummy region to separate
2398+	     the addresses of instantiated variables */
2399+ push_frame (GET_MODE_SIZE (DImode), 0);
2400+ assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2401+ }
2402+ }
2403+
2404+ if (first_call_from_global_alloc)
2405+ {
2406+ push_frame_offset = push_allocated_offset = 0;
2407+ if (check_out_of_frame_access (get_insns (), starting_frame))
2408+ {
2409+ if (STARTING_FRAME_OFFSET)
2410+ {
2411+	      /* if there is an access beyond the frame, push a dummy region
2412+		 to separate the addresses of instantiated variables */
2413+ push_frame (GET_MODE_SIZE (DImode), 0);
2414+ assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2415+ }
2416+ else
2417+ push_allocated_offset = starting_frame;
2418+ }
2419+ }
2420+
2421+ saved_frame_offset = frame_offset;
2422+ frame_offset = push_frame_offset;
2423+
2424+ new = assign_stack_local (mode, size, align);
2425+
2426+ push_frame_offset = frame_offset;
2427+ frame_offset = saved_frame_offset;
2428+
2429+ if (push_frame_offset > push_allocated_offset)
2430+ {
2431+ push_frame (units_per_push, push_allocated_offset + STARTING_FRAME_OFFSET);
2432+
2433+ assign_stack_local (BLKmode, units_per_push, -1);
2434+ push_allocated_offset += units_per_push;
2435+ }
2436+
2437+  /* At the second call from global_alloc, alpha pushes the frame and assigns
2438+     a local variable at the top of the stack */
2439+ if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
2440+ push_frame_offset = push_allocated_offset = 0;
2441+
2442+ return new;
2443+#endif
2444+}
2445+
2446+
2447+#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2448+/*
2449+  push frame information for instantiating a pseudo register at the top of the stack.
2450+  This is only for the "frame grows upward" case, i.e. FRAME_GROWS_DOWNWARD is
2451+ not defined.
2452+
2453+  It is called by the purge_addressof function and by the global_alloc (or
2454+  reload) function.
2455+*/
2456+static void
2457+push_frame (var_size, boundary)
2458+ HOST_WIDE_INT var_size, boundary;
2459+{
2460+ reset_used_flags_for_push_frame();
2461+
2462+ /* scan all declarations of variables and fix the offset address of the variable based on the frame pointer */
2463+ push_frame_in_decls (DECL_INITIAL (current_function_decl), var_size, boundary);
2464+
2465+ /* scan all argument variable and fix the offset address based on the frame pointer */
2466+ push_frame_in_args (DECL_ARGUMENTS (current_function_decl), var_size, boundary);
2467+
2468+ /* scan all operands of all insns and fix the offset address based on the frame pointer */
2469+ push_frame_of_insns (get_insns (), var_size, boundary);
2470+
2471+ /* scan all reg_equiv_memory_loc and reg_equiv_constant*/
2472+ push_frame_of_reg_equiv_memory_loc (var_size, boundary);
2473+ push_frame_of_reg_equiv_constant (var_size, boundary);
2474+
2475+ reset_used_flags_for_push_frame();
2476+}
2477+
2478+static void
2479+reset_used_flags_for_push_frame()
2480+{
2481+ int i;
2482+ extern rtx *reg_equiv_memory_loc;
2483+ extern rtx *reg_equiv_constant;
2484+
2485+ /* Clear all the USED bits in operands of all insns and declarations of local vars */
2486+ reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
2487+ reset_used_flags_for_insns (get_insns ());
2488+
2489+
2490+  /* The following code is processed only if push_frame is called from the
2491+     global_alloc (or reload) function */
2492+ if (reg_equiv_memory_loc == 0) return;
2493+
2494+ for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2495+ if (reg_equiv_memory_loc[i])
2496+ {
2497+ rtx x = reg_equiv_memory_loc[i];
2498+
2499+ if (GET_CODE (x) == MEM
2500+ && GET_CODE (XEXP (x, 0)) == PLUS
2501+ && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
2502+ {
2503+ /* reset */
2504+ XEXP (x, 0)->used = 0;
2505+ }
2506+ }
2507+
2508+
2509+ if (reg_equiv_constant == 0) return;
2510+
2511+ for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2512+ if (reg_equiv_constant[i])
2513+ {
2514+ rtx x = reg_equiv_constant[i];
2515+
2516+ if (GET_CODE (x) == PLUS
2517+ && AUTO_BASEPTR (x) == frame_pointer_rtx)
2518+ {
2519+ /* reset */
2520+ x->used = 0;
2521+ }
2522+ }
2523+}
2524+
2525+static void
2526+push_frame_in_decls (block, push_size, boundary)
2527+ tree block;
2528+ HOST_WIDE_INT push_size, boundary;
2529+{
2530+ tree types;
2531+ HOST_WIDE_INT offset;
2532+ rtx home;
2533+
2534+ while (block && TREE_CODE(block)==BLOCK)
2535+ {
2536+ types = BLOCK_VARS(block);
2537+
2538+ while (types)
2539+ {
2540+	  /* skip declarations that refer to an external variable and
2541+	     also skip global variables */
2542+ if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
2543+ {
2544+
2545+ if (!DECL_RTL_SET_P (types)) goto next;
2546+ home = DECL_RTL (types);
2547+
2548+ /* process for static local variable */
2549+ if (GET_CODE (home) == MEM
2550+ && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
2551+ goto next;
2552+
2553+ if (GET_CODE (home) == MEM
2554+ && GET_CODE (XEXP (home, 0)) == REG)
2555+ {
2556+ if (XEXP (home, 0) != frame_pointer_rtx
2557+ || boundary != 0)
2558+ goto next;
2559+
2560+ XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2561+ push_size);
2562+
2563+ /* mark */
2564+ XEXP (home, 0)->used = 1;
2565+ }
2566+
2567+ if (GET_CODE (home) == MEM
2568+ && GET_CODE (XEXP (home, 0)) == MEM)
2569+ {
2570+
2571+	      /* process for a dynamically allocated array */
2572+ home = XEXP (home, 0);
2573+ }
2574+
2575+ if (GET_CODE (home) == MEM
2576+ && GET_CODE (XEXP (home, 0)) == PLUS
2577+ && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
2578+ {
2579+ offset = AUTO_OFFSET(XEXP (home, 0));
2580+
2581+ if (! XEXP (home, 0)->used
2582+ && offset >= boundary)
2583+ {
2584+ offset += push_size;
2585+ XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2586+
2587+ /* mark */
2588+ XEXP (home, 0)->used = 1;
2589+ }
2590+ }
2591+
2592+ }
2593+ next:
2594+ types = TREE_CHAIN(types);
2595+ }
2596+
2597+ push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
2598+ block = BLOCK_CHAIN (block);
2599+ }
2600+}
2601+
2602+
2603+static void
2604+push_frame_in_args (parms, push_size, boundary)
2605+ tree parms;
2606+ HOST_WIDE_INT push_size, boundary;
2607+{
2608+ rtx home;
2609+ HOST_WIDE_INT offset;
2610+
2611+ for (; parms; parms = TREE_CHAIN (parms))
2612+ if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2613+ {
2614+ if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
2615+ {
2616+ home = DECL_INCOMING_RTL (parms);
2617+ offset = AUTO_OFFSET(XEXP (home, 0));
2618+
2619+ if (XEXP (home, 0)->used || offset < boundary) continue;
2620+
2621+ /* the operand related to the sweep variable */
2622+ if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
2623+ {
2624+ if (XEXP (home, 0) == frame_pointer_rtx)
2625+ XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2626+ push_size);
2627+ else {
2628+ offset += push_size;
2629+ XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2630+ offset);
2631+ }
2632+
2633+ /* mark */
2634+ XEXP (home, 0)->used = 1;
2635+ }
2636+ }
2637+ }
2638+}
2639+
2640+
2641+static int insn_pushed;
2642+static int *fp_equiv = 0;
2643+
2644+static void
2645+push_frame_of_insns (insn, push_size, boundary)
2646+ rtx insn;
2647+ HOST_WIDE_INT push_size, boundary;
2648+{
2649+ /* init fp_equiv */
2650+ fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
2651+
2652+ for (; insn; insn = NEXT_INSN (insn))
2653+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2654+ || GET_CODE (insn) == CALL_INSN)
2655+ {
2656+ insn_pushed = FALSE;
2657+ push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
2658+
2659+ if (insn_pushed)
2660+ {
2661+ rtx trial = insn;
2662+ rtx before = PREV_INSN (trial);
2663+ rtx after = NEXT_INSN (trial);
2664+ int has_barrier = 0;
2665+ rtx tem;
2666+ rtx seq = split_insns (PATTERN (insn), insn);
2667+
2668+ /* If we are splitting a JUMP_INSN, it might be followed by a
2669+ BARRIER. We may need to handle this specially. */
2670+ if (after && GET_CODE (after) == BARRIER)
2671+ {
2672+ has_barrier = 1;
2673+ after = NEXT_INSN (after);
2674+ }
2675+
2676+ if (seq && GET_CODE (seq) == SEQUENCE)
2677+ {
2678+ if (XVECLEN (seq, 0) == 2)
2679+ {
2680+ rtx pattern = PATTERN (XVECEXP (seq, 0, 1));
2681+
2682+ if (GET_CODE (pattern) == SET
2683+ && GET_CODE (XEXP (pattern, 0)) == REG
2684+ && GET_CODE (XEXP (pattern, 1)) == PLUS
2685+ && XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
2686+ && CONSTANT_P (XEXP (XEXP (pattern, 1), 1)))
2687+ {
2688+ rtx offset = XEXP (XEXP (pattern, 1), 1);
2689+ fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
2690+
2691+ /* replace the pattern of the insn */
2692+ add_insn_after (XVECEXP (seq, 0, 0), before);
2693+ delete_insn (trial);
2694+ goto next;
2695+ }
2696+ }
2697+
2698+ /* excerpt from emit-rtl.c: L3320 */
2699+ tem = emit_insn_after (seq, trial);
2700+
2701+ delete_related_insns (trial);
2702+ if (has_barrier)
2703+ emit_barrier_after (tem);
2704+
2705+ /* Recursively call try_split for each new insn created */
2706+ for (tem = NEXT_INSN (before); tem != after;
2707+ tem = NEXT_INSN (tem))
2708+ if (! INSN_DELETED_P (tem) && INSN_P (tem))
2709+ tem = try_split (PATTERN (tem), tem, 1);
2710+ }
2711+ }
2712+
2713+ next:
2714+ /* push frame in NOTE */
2715+ push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
2716+
2717+ /* push frame in CALL EXPR_LIST */
2718+ if (GET_CODE (insn) == CALL_INSN)
2719+ push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn), push_size, boundary);
2720+ }
2721+
2722+ /* Clean up. */
2723+ free (fp_equiv);
2724+}
2725+
2726+
2727+static void
2728+push_frame_in_operand (insn, orig, push_size, boundary)
2729+ rtx insn, orig;
2730+ HOST_WIDE_INT push_size, boundary;
2731+{
2732+ register rtx x = orig;
2733+ register enum rtx_code code;
2734+ int i, j;
2735+ HOST_WIDE_INT offset;
2736+ const char *fmt;
2737+
2738+ if (x == 0)
2739+ return;
2740+
2741+ code = GET_CODE (x);
2742+
2743+ switch (code)
2744+ {
2745+ case CONST_INT:
2746+ case CONST_DOUBLE:
2747+ case CONST:
2748+ case SYMBOL_REF:
2749+ case CODE_LABEL:
2750+ case PC:
2751+ case CC0:
2752+ case ASM_INPUT:
2753+ case ADDR_VEC:
2754+ case ADDR_DIFF_VEC:
2755+ case RETURN:
2756+ case REG:
2757+ case ADDRESSOF:
2758+ case USE:
2759+ return;
2760+
2761+ case SET:
2762+ /*
2763+ skip setjmp setup insn and setjmp restore insn
2764+ alpha case:
2765+ (set (MEM (reg:SI xx)) (frame_pointer_rtx)))
2766+ (set (frame_pointer_rtx) (REG))
2767+ */
2768+ if (GET_CODE (XEXP (x, 0)) == MEM
2769+ && XEXP (x, 1) == frame_pointer_rtx)
2770+ return;
2771+ if (XEXP (x, 0) == frame_pointer_rtx
2772+ && GET_CODE (XEXP (x, 1)) == REG)
2773+ return;
2774+
2775+ /*
2776+ powerpc case: restores setjmp address
2777+ (set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
2778+ or
2779+ (set (reg) (plus frame_pointer_rtx const_int -n))
2780+ (set (frame_pointer_rtx) (reg))
2781+ */
2782+ if (GET_CODE (XEXP (x, 0)) == REG
2783+ && GET_CODE (XEXP (x, 1)) == PLUS
2784+ && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
2785+ && CONSTANT_P (XEXP (XEXP (x, 1), 1))
2786+ && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
2787+ {
2788+ x = XEXP (x, 1);
2789+ offset = AUTO_OFFSET(x);
2790+ if (x->used || abs (offset) < boundary)
2791+ return;
2792+
2793+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
2794+ x->used = 1; insn_pushed = TRUE;
2795+ return;
2796+ }
2797+
2798+ /* reset fp_equiv register */
2799+ else if (GET_CODE (XEXP (x, 0)) == REG
2800+ && fp_equiv[REGNO (XEXP (x, 0))])
2801+ fp_equiv[REGNO (XEXP (x, 0))] = 0;
2802+
2803+      /* propagate the fp_equiv register */
2804+ else if (GET_CODE (XEXP (x, 0)) == REG
2805+ && GET_CODE (XEXP (x, 1)) == REG
2806+ && fp_equiv[REGNO (XEXP (x, 1))])
2807+ if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
2808+ || reg_renumber[REGNO (XEXP (x, 0))] > 0)
2809+ fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
2810+ break;
2811+
2812+ case MEM:
2813+ if (XEXP (x, 0) == frame_pointer_rtx
2814+ && boundary == 0)
2815+ {
2816+ XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2817+ XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2818+ return;
2819+ }
2820+ break;
2821+
2822+ case PLUS:
2823+ offset = AUTO_OFFSET(x);
2824+
2825+ /* Handle special case of frame register plus constant. */
2826+ if (CONSTANT_P (XEXP (x, 1))
2827+ && XEXP (x, 0) == frame_pointer_rtx)
2828+ {
2829+ if (x->used || offset < boundary)
2830+ return;
2831+
2832+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2833+ x->used = 1; insn_pushed = TRUE;
2834+
2835+ return;
2836+ }
2837+ /*
2838+ Handle alpha case:
2839+ (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
2840+ */
2841+ if (CONSTANT_P (XEXP (x, 1))
2842+ && GET_CODE (XEXP (x, 0)) == SUBREG
2843+ && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
2844+ {
2845+ if (x->used || offset < boundary)
2846+ return;
2847+
2848+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2849+ x->used = 1; insn_pushed = TRUE;
2850+
2851+ return;
2852+ }
2853+ /*
2854+ Handle powerpc case:
2855+ (set (reg x) (plus fp const))
2856+ (set (.....) (... (plus (reg x) (const B))))
2857+ */
2858+ else if (CONSTANT_P (XEXP (x, 1))
2859+ && GET_CODE (XEXP (x, 0)) == REG
2860+ && fp_equiv[REGNO (XEXP (x, 0))])
2861+ {
2862+ if (x->used) return;
2863+
2864+ offset += fp_equiv[REGNO (XEXP (x, 0))];
2865+
2866+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2867+ x->used = 1; insn_pushed = TRUE;
2868+
2869+ return;
2870+ }
2871+ /*
2872+ Handle special case of frame register plus reg (constant).
2873+ (set (reg x) (const B))
2874+ (set (....) (...(plus fp (reg x))))
2875+ */
2876+ else if (XEXP (x, 0) == frame_pointer_rtx
2877+ && GET_CODE (XEXP (x, 1)) == REG
2878+ && PREV_INSN (insn)
2879+ && PATTERN (PREV_INSN (insn))
2880+ && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
2881+ && CONSTANT_P (SET_SRC (PATTERN (PREV_INSN (insn)))))
2882+ {
2883+ HOST_WIDE_INT offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
2884+
2885+ if (x->used || offset < boundary)
2886+ return;
2887+
2888+ SET_SRC (PATTERN (PREV_INSN (insn)))
2889+ = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2890+ x->used = 1;
2891+ XEXP (x, 1)->used = 1;
2892+
2893+ return;
2894+ }
2895+ /* Handle special case of frame register plus reg (used). */
2896+ else if (XEXP (x, 0) == frame_pointer_rtx
2897+ && XEXP (x, 1)->used)
2898+ {
2899+ x->used = 1;
2900+ return;
2901+ }
2902+ /*
2903+ process further subtree:
2904+ Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2905+ (const_int 5))
2906+ */
2907+ break;
2908+
2909+ case CALL_PLACEHOLDER:
2910+ push_frame_of_insns (XEXP (x, 0), push_size, boundary);
2911+ push_frame_of_insns (XEXP (x, 1), push_size, boundary);
2912+ push_frame_of_insns (XEXP (x, 2), push_size, boundary);
2913+ break;
2914+
2915+ default:
2916+ break;
2917+ }
2918+
2919+ /* Scan all subexpressions. */
2920+ fmt = GET_RTX_FORMAT (code);
2921+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2922+ if (*fmt == 'e')
2923+ {
2924+ if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
2925+ fatal_insn ("push_frame_in_operand", insn);
2926+ push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
2927+ }
2928+ else if (*fmt == 'E')
2929+ for (j = 0; j < XVECLEN (x, i); j++)
2930+ push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
2931+}
2932+
2933+static void
2934+push_frame_of_reg_equiv_memory_loc (push_size, boundary)
2935+ HOST_WIDE_INT push_size, boundary;
2936+{
2937+ int i;
2938+ extern rtx *reg_equiv_memory_loc;
2939+
2940+  /* This function does something only if push_frame is called from the
2941+     global_alloc (or reload) function */
2942+ if (reg_equiv_memory_loc == 0) return;
2943+
2944+ for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2945+ if (reg_equiv_memory_loc[i])
2946+ {
2947+ rtx x = reg_equiv_memory_loc[i];
2948+ int offset;
2949+
2950+ if (GET_CODE (x) == MEM
2951+ && GET_CODE (XEXP (x, 0)) == PLUS
2952+ && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
2953+ {
2954+ offset = AUTO_OFFSET(XEXP (x, 0));
2955+
2956+ if (! XEXP (x, 0)->used
2957+ && offset >= boundary)
2958+ {
2959+ offset += push_size;
2960+ XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2961+
2962+ /* mark */
2963+ XEXP (x, 0)->used = 1;
2964+ }
2965+ }
2966+ else if (GET_CODE (x) == MEM
2967+ && XEXP (x, 0) == frame_pointer_rtx
2968+ && boundary == 0)
2969+ {
2970+ XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2971+ XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2972+ }
2973+ }
2974+}
2975+
2976+static void
2977+push_frame_of_reg_equiv_constant (push_size, boundary)
2978+ HOST_WIDE_INT push_size, boundary;
2979+{
2980+ int i;
2981+ extern rtx *reg_equiv_constant;
2982+
2983+  /* This function does something only if push_frame is called from the
2984+     global_alloc (or reload) function */
2985+ if (reg_equiv_constant == 0) return;
2986+
2987+ for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2988+ if (reg_equiv_constant[i])
2989+ {
2990+ rtx x = reg_equiv_constant[i];
2991+ int offset;
2992+
2993+ if (GET_CODE (x) == PLUS
2994+ && XEXP (x, 0) == frame_pointer_rtx)
2995+ {
2996+ offset = AUTO_OFFSET(x);
2997+
2998+ if (! x->used
2999+ && offset >= boundary)
3000+ {
3001+ offset += push_size;
3002+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
3003+
3004+ /* mark */
3005+ x->used = 1;
3006+ }
3007+ }
3008+ else if (x == frame_pointer_rtx
3009+ && boundary == 0)
3010+ {
3011+ reg_equiv_constant[i]
3012+ = plus_constant (frame_pointer_rtx, push_size);
3013+ reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
3014+ }
3015+ }
3016+}
3017+
3018+static int
3019+check_out_of_frame_access (insn, boundary)
3020+ rtx insn;
3021+ HOST_WIDE_INT boundary;
3022+{
3023+ for (; insn; insn = NEXT_INSN (insn))
3024+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3025+ || GET_CODE (insn) == CALL_INSN)
3026+ {
3027+ if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
3028+ return TRUE;
3029+ }
3030+ return FALSE;
3031+}
3032+
3033+
3034+static int
3035+check_out_of_frame_access_in_operand (orig, boundary)
3036+ rtx orig;
3037+ HOST_WIDE_INT boundary;
3038+{
3039+ register rtx x = orig;
3040+ register enum rtx_code code;
3041+ int i, j;
3042+ const char *fmt;
3043+
3044+ if (x == 0)
3045+ return FALSE;
3046+
3047+ code = GET_CODE (x);
3048+
3049+ switch (code)
3050+ {
3051+ case CONST_INT:
3052+ case CONST_DOUBLE:
3053+ case CONST:
3054+ case SYMBOL_REF:
3055+ case CODE_LABEL:
3056+ case PC:
3057+ case CC0:
3058+ case ASM_INPUT:
3059+ case ADDR_VEC:
3060+ case ADDR_DIFF_VEC:
3061+ case RETURN:
3062+ case REG:
3063+ case ADDRESSOF:
3064+ return FALSE;
3065+
3066+ case MEM:
3067+ if (XEXP (x, 0) == frame_pointer_rtx)
3068+ if (0 < boundary) return TRUE;
3069+ break;
3070+
3071+ case PLUS:
3072+ /* Handle special case of frame register plus constant. */
3073+ if (CONSTANT_P (XEXP (x, 1))
3074+ && XEXP (x, 0) == frame_pointer_rtx)
3075+ {
3076+ if (0 <= AUTO_OFFSET(x)
3077+ && AUTO_OFFSET(x) < boundary) return TRUE;
3078+ return FALSE;
3079+ }
3080+ /*
3081+ process further subtree:
3082+ Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
3083+ (const_int 5))
3084+ */
3085+ break;
3086+
3087+ case CALL_PLACEHOLDER:
3088+ if (check_out_of_frame_access (XEXP (x, 0), boundary)) return TRUE;
3089+ if (check_out_of_frame_access (XEXP (x, 1), boundary)) return TRUE;
3090+ if (check_out_of_frame_access (XEXP (x, 2), boundary)) return TRUE;
3091+ break;
3092+
3093+ default:
3094+ break;
3095+ }
3096+
3097+ /* Scan all subexpressions. */
3098+ fmt = GET_RTX_FORMAT (code);
3099+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3100+ if (*fmt == 'e')
3101+ {
3102+ if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
3103+ return TRUE;
3104+ }
3105+ else if (*fmt == 'E')
3106+ for (j = 0; j < XVECLEN (x, i); j++)
3107+ if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
3108+ return TRUE;
3109+
3110+ return FALSE;
3111+}
3112+#endif
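
That closes the frame-grows-upward machinery: string buffers are swept next to the guard, other locals and spilled pseudos are pushed past them, and memory-passed arguments are copied out of reach. A hedged illustration in plain C of the situation all of this is aimed at (offsets and exact layout are target dependent and not taken from the patch):

    #include <string.h>

    /* Illustrative only.  Without protection, p is typically allocated
       above buf and an overflow of buf can overwrite it; with the
       reordering above (and -fstack-protector), buf ends up next to the
       guard and p is kept out of the overflow path.  */
    int
    check (const char *input)
    {
      const char *p = "ok";
      char buf[32];

      strcpy (buf, input);   /* unchecked copy: the classic overflow */
      return strcmp (p, buf) == 0;
    }
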
abef1cdf 3113diff -urN gcc-3.3.1/gcc/protector.h gcc-3.3.1-pp/gcc/protector.h
a89a5183 3114--- gcc-3.3.1/gcc/protector.h 1970-01-01 00:00:00.000000000 +0000
abef1cdf 3115+++ gcc-3.3.1-pp/gcc/protector.h 2003-09-12 13:40:28.000000000 +0000
a89a5183
RJ
3116@@ -0,0 +1,48 @@
3117+/* RTL buffer overflow protection function for GNU C compiler
3118+ Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.
3119+
3120+This file is part of GCC.
3121+
3122+GCC is free software; you can redistribute it and/or modify it under
3123+the terms of the GNU General Public License as published by the Free
3124+Software Foundation; either version 2, or (at your option) any later
3125+version.
3126+
3127+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
3128+WARRANTY; without even the implied warranty of MERCHANTABILITY or
3129+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
3130+for more details.
3131+
3132+You should have received a copy of the GNU General Public License
3133+along with GCC; see the file COPYING. If not, write to the Free
3134+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
3135+02111-1307, USA. */
3136+
3137+
3138+/* declaration of GUARD variable */
3139+#define GUARD_m Pmode
3140+#define UNITS_PER_GUARD MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT, GET_MODE_SIZE (GUARD_m))
3141+
3142+#ifndef L_stack_smash_handler
3143+
3144+/* insert a guard variable before a character buffer and change the order
3145+ of pointer variables, character buffers and pointer arguments */
3146+
3147+extern void prepare_stack_protection PARAMS ((int inlinable));
3148+
3149+#ifdef TREE_CODE
3150+/* search for a character array in the specified type tree */
3151+
3152+extern int search_string_def PARAMS ((tree names));
3153+#endif
3154+
3155+/* examine whether the input contains frame pointer addressing */
3156+
3157+extern int contains_fp PARAMS ((rtx op));
3158+
3159+/* allocate a local variable in the stack area before character buffers
3160+   so that it is not corrupted */
3161+
3162+extern rtx assign_stack_local_for_pseudo_reg PARAMS ((enum machine_mode, HOST_WIDE_INT, int));
3163+
3164+#endif
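
protector.h exposes only what the rest of the compiler needs: prepare_stack_protection for toplev.c and assign_stack_local_for_pseudo_reg for reload1.c (both hunks appear below), plus two predicates used by other files in the full patch. As a hedged sketch of how GUARD_m and UNITS_PER_GUARD would be used to carve out the guard slot (assumption only; the real allocation and the epilogue check live in parts of protector.c not shown in this hunk):

    /* Sketch only: reserve one guard-sized, guard-aligned slot at the top
       of the local-variable area.  The actual code in protector.c also
       records the slot and emits the runtime comparison.  */
    static rtx
    sketch_assign_guard_slot (void)
    {
      return assign_stack_local (GUARD_m, UNITS_PER_GUARD, -1);
    }
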
abef1cdf 3165diff -urN gcc-3.3.1/gcc/reload1.c gcc-3.3.1-pp/gcc/reload1.c
a89a5183 3166--- gcc-3.3.1/gcc/reload1.c 2003-06-07 05:30:09.000000000 +0000
abef1cdf 3167+++ gcc-3.3.1-pp/gcc/reload1.c 2003-09-12 13:40:28.000000000 +0000
a89a5183
RJ
3168@@ -42,6 +42,7 @@
3169 #include "toplev.h"
3170 #include "except.h"
3171 #include "tree.h"
3172+#include "protector.h"
3173
3174 /* This file contains the reload pass of the compiler, which is
3175 run after register allocation has been done. It checks that
3176@@ -1992,7 +1993,7 @@
3177 if (from_reg == -1)
3178 {
3179 /* No known place to spill from => no slot to reuse. */
3180- x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
3181+ x = assign_stack_local_for_pseudo_reg (GET_MODE (regno_reg_rtx[i]), total_size,
3182 inherent_size == total_size ? 0 : -1);
3183 if (BYTES_BIG_ENDIAN)
3184 /* Cancel the big-endian correction done in assign_stack_local.
abef1cdf 3185diff -urN gcc-3.3.1/gcc/simplify-rtx.c gcc-3.3.1-pp/gcc/simplify-rtx.c
a89a5183 3186--- gcc-3.3.1/gcc/simplify-rtx.c 2003-07-03 07:38:22.000000000 +0000
abef1cdf 3187+++ gcc-3.3.1-pp/gcc/simplify-rtx.c 2003-09-12 13:40:28.000000000 +0000
a89a5183
RJ
3188@@ -1670,7 +1670,8 @@
3189 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts;
3190 int first, negate, changed;
3191 int i, j;
3192-
3193+ HOST_WIDE_INT fp_offset = 0;
3194+
3195 memset ((char *) ops, 0, sizeof ops);
3196
3197 /* Set up the two operands and then expand them until nothing has been
3198@@ -1695,6 +1696,10 @@
3199 switch (this_code)
3200 {
3201 case PLUS:
3202+ if (flag_propolice_protection
3203+ && XEXP (this_op, 0) == virtual_stack_vars_rtx
3204+ && GET_CODE (XEXP (this_op, 1)) == CONST_INT)
3205+ fp_offset = INTVAL (XEXP (this_op, 1));
3206 case MINUS:
3207 if (n_ops == 7)
3208 return NULL_RTX;
3209@@ -1849,10 +1854,10 @@
3210 && GET_CODE (ops[n_ops - 1].op) == CONST_INT
3211 && CONSTANT_P (ops[n_ops - 2].op))
3212 {
3213- rtx value = ops[n_ops - 1].op;
3214+ int value = INTVAL (ops[n_ops - 1].op);
3215 if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3216- value = neg_const_int (mode, value);
3217- ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
3218+ value = -value;
3219+ ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, value);
3220 n_ops--;
3221 }
3222
3223@@ -1871,6 +1876,54 @@
3224 || (n_ops + n_consts == input_ops && n_consts <= input_consts)))
3225 return NULL_RTX;
3226
3227+ if (flag_propolice_protection)
3228+ {
3229+ /* keep the addressing style of local variables
3230+	 as (plus (virtual_stack_vars_rtx) (CONST_INT x))
3231+ (1) inline function is expanded, (+ (+VFP c1) -c2)=>(+ VFP c1-c2)
3232+ (2) the case ary[r-1], (+ (+VFP c1) (+r -1))=>(+ R (+r -1))
3233+ */
3234+ for (i = 0; i < n_ops; i++)
3235+#ifdef FRAME_GROWS_DOWNWARD
3236+ if (ops[i].op == virtual_stack_vars_rtx)
3237+#else
3238+ if (ops[i].op == virtual_stack_vars_rtx
3239+ || ops[i].op == frame_pointer_rtx)
3240+#endif
3241+ {
3242+ if (GET_CODE (ops[n_ops - 1].op) == CONST_INT)
3243+ {
3244+ HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
3245+ if (n_ops < 3 || value >= fp_offset)
3246+ {
3247+ ops[i].op = plus_constant (ops[i].op, value);
3248+ n_ops--;
3249+ }
3250+ else
3251+ {
3252+ if (!force
3253+ && (n_ops+1 + n_consts > input_ops
3254+ || (n_ops+1 + n_consts == input_ops && n_consts <= input_consts)))
3255+ return NULL_RTX;
3256+ ops[n_ops - 1].op = GEN_INT (value-fp_offset);
3257+ ops[i].op = plus_constant (ops[i].op, fp_offset);
3258+ }
3259+ }
3260+ /* buf[BUFSIZE]: buf is the first local variable (+ (+ fp -S) S)
3261+ or (+ (fp 0) r) ==> ((+ (+fp 1) r) -1) */
3262+ else if (fp_offset != 0)
3263+ return NULL_RTX;
3264+#ifndef FRAME_GROWS_DOWNWARD
3265+ /*
3266+ * For the case of buf[i], i: REG, buf: (plus fp 0),
3267+ */
3268+ else if (fp_offset == 0)
3269+ return NULL_RTX;
3270+#endif
3271+ break;
3272+ }
3273+ }
3274+
3275 /* Put a non-negated operand first. If there aren't any, make all
3276 operands positive and negate the whole thing later. */
3277
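
The simplify-rtx.c hunk stops the associator from folding the frame-pointer offset of a protected variable into an arbitrary sum, so later passes still see the canonical (plus virtual_stack_vars_rtx (const_int N)) form. Case (2) of the comment corresponds to source like this (illustrative only, not taken from the patch):

    /* Illustrative source for the "ary[r-1]" case mentioned in the comment:
       the address is built as (+ (+ VFP c1) (+ r -1)) and must keep its
       (plus VFP const) part intact.  */
    char
    last_char (int r)
    {
      char ary[16] = "0123456789abcde";
      return ary[r - 1];
    }
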
abef1cdf 3278diff -urN gcc-3.3.1/gcc/toplev.c gcc-3.3.1-pp/gcc/toplev.c
a89a5183 3279--- gcc-3.3.1/gcc/toplev.c 2003-07-18 06:59:16.000000000 +0000
abef1cdf 3280+++ gcc-3.3.1-pp/gcc/toplev.c 2003-09-12 13:40:28.000000000 +0000
a89a5183
RJ
3281@@ -904,6 +904,13 @@
3282 minimum function alignment. Zero means no alignment is forced. */
3283 int force_align_functions_log;
3284
3285+#if defined(STACK_PROTECTOR) && defined(STACK_GROWS_DOWNWARD)
3286+/* Nonzero means use propolice as a stack protection method */
3287+int flag_propolice_protection = 1;
3288+#else
3289+int flag_propolice_protection = 0;
3290+#endif
3291+
3292 /* Table of supported debugging formats. */
3293 static const struct
3294 {
3295@@ -1188,6 +1195,10 @@
3296 N_("Trap for signed overflow in addition / subtraction / multiplication") },
3297 { "new-ra", &flag_new_regalloc, 1,
3298 N_("Use graph coloring register allocation.") },
3299+ {"stack-protector", &flag_propolice_protection, 1,
3300+ N_("Enables stack protection") },
3301+ {"no-stack-protector", &flag_propolice_protection, 0,
3302+ N_("Disables stack protection") },
3303 };
3304
3305 /* Table of language-specific options. */
3306@@ -1547,7 +1558,9 @@
3307 {"missing-noreturn", &warn_missing_noreturn, 1,
3308 N_("Warn about functions which might be candidates for attribute noreturn") },
3309 {"strict-aliasing", &warn_strict_aliasing, 1,
3310- N_ ("Warn about code which might break the strict aliasing rules") }
3311+ N_ ("Warn about code which might break the strict aliasing rules") },
3312+ {"stack-protector", &warn_stack_protector, 1,
3313+ N_("Warn when disabling stack protector for some reason")}
3314 };
3315
3316 void
3317@@ -2449,6 +2462,8 @@
3318
3319 insns = get_insns ();
3320
3321+ if (flag_propolice_protection) prepare_stack_protection (inlinable);
3322+
3323 /* Dump the rtl code if we are dumping rtl. */
3324
3325 if (open_dump_file (DFI_rtl, decl))
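
With the toplev.c hunks in place, protection defaults to on whenever the target defines STACK_PROTECTOR and its stack grows downward, and it can be switched per compilation unit with -fstack-protector / -fno-stack-protector; -Wstack-protector warns when the protector has to be disabled for a function. A deliberately overflowing test program one might compile with those flags (illustrative, not from the patch):

    #include <string.h>

    /* Deliberately unsafe test case: built with -fstack-protector (and
       -Wstack-protector to see any functions the protector skips), the
       overflow of buf should be caught by the ProPolice runtime handler
       rather than silently corrupting the frame.  */
    int
    main (int argc, char **argv)
    {
      char buf[8];

      if (argc > 1)
        strcpy (buf, argv[1]);
      return 0;
    }
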