git.pld-linux.org Git - packages/crossppc-gcc.git/blame - gcc-pr24419.patch
- more
[packages/crossppc-gcc.git] / gcc-pr24419.patch
CommitLineData
170f3820
PS
1
2 PR target/24419
3 * i386.c (pro_epilogue_adjust_stack): Add another argument to
4 indicate if memory should be clobbered or not. Adjust stack
5 pointer directly if memory shouldn't be clobbered.
 6 (ix86_expand_prologue): Updated.  Don't clobber memory if
7 adjusting stack after registers have been saved on stack.
8 (ix86_expand_epilogue): Updated to call
9 pro_epilogue_adjust_stack with memory clobbered.
10
11 * i386.md: Don't convert stack pointer subtractions to push
12 when memory isn't clobbered if red zone is enabled.
13
14--- gcc/gcc/config/i386/i386.c 2005-10-18 17:26:40.000000000 -0700
15+++ gcc/gcc/config/i386/i386.c 2005-10-18 20:35:19.000000000 -0700
16@@ -4761,22 +4761,38 @@ ix86_emit_save_regs_using_mov (rtx point
17 otherwise. */
18
19 static void
20-pro_epilogue_adjust_stack (rtx dest, rtx src, rtx offset, int style)
21+pro_epilogue_adjust_stack (rtx dest, rtx src, rtx offset, int style,
22+ bool clobber_memory)
23 {
24 rtx insn;
25
26 if (! TARGET_64BIT)
27- insn = emit_insn (gen_pro_epilogue_adjust_stack_1 (dest, src, offset));
28+ {
29+ if (clobber_memory)
30+ insn = emit_insn (gen_pro_epilogue_adjust_stack_1 (dest, src,
31+ offset));
32+ else
33+ insn = emit_insn (gen_addsi3 (dest, src, offset));
34+ }
35 else if (x86_64_immediate_operand (offset, DImode))
36- insn = emit_insn (gen_pro_epilogue_adjust_stack_rex64 (dest, src, offset));
37+ {
38+ if (clobber_memory)
39+ insn = emit_insn (gen_pro_epilogue_adjust_stack_rex64 (dest,
40+ src,
41+ offset));
42+ else
43+ insn = emit_insn (gen_adddi3 (dest, src, offset));
44+ }
45 else
46 {
47 rtx r11;
48 /* r11 is used by indirect sibcall return as well, set before the
49 epilogue and used after the epilogue. ATM indirect sibcall
50 shouldn't be used together with huge frame sizes in one
51- function because of the frame_size check in sibcall.c. */
52- gcc_assert (style);
53+ function because of the frame_size check in sibcall.c. If
54+ huge frame size is used, memory should always be clobbered
55+ when stack is adjusted. */
56+ gcc_assert (style && clobber_memory);
57 r11 = gen_rtx_REG (DImode, FIRST_REX_INT_REG + 3 /* R11 */);
58 insn = emit_insn (gen_rtx_SET (DImode, r11, offset));
59 if (style < 0)
60@@ -4797,6 +4813,7 @@ ix86_expand_prologue (void)
61 bool pic_reg_used;
62 struct ix86_frame frame;
63 HOST_WIDE_INT allocate;
64+ bool using_mov;
65
66 ix86_compute_frame_layout (&frame);
67
68@@ -4821,7 +4838,8 @@ ix86_expand_prologue (void)
69
70 /* When using red zone we may start register saving before allocating
71 the stack frame saving one cycle of the prologue. */
72- if (TARGET_RED_ZONE && frame.save_regs_using_mov)
73+ using_mov = TARGET_RED_ZONE && frame.save_regs_using_mov;
74+ if (using_mov)
75 ix86_emit_save_regs_using_mov (frame_pointer_needed ? hard_frame_pointer_rtx
76 : stack_pointer_rtx,
77 -frame.nregs * UNITS_PER_WORD);
78@@ -4830,7 +4848,7 @@ ix86_expand_prologue (void)
79 ;
80 else if (! TARGET_STACK_PROBE || allocate < CHECK_STACK_LIMIT)
81 pro_epilogue_adjust_stack (stack_pointer_rtx, stack_pointer_rtx,
82- GEN_INT (-allocate), -1);
83+ GEN_INT (-allocate), -1, !using_mov);
84 else
85 {
86 /* Only valid for Win32. */
87@@ -5011,7 +5029,7 @@ ix86_expand_epilogue (int style)
88 emit_move_insn (hard_frame_pointer_rtx, tmp);
89
90 pro_epilogue_adjust_stack (stack_pointer_rtx, sa,
91- const0_rtx, style);
92+ const0_rtx, style, true);
93 }
94 else
95 {
96@@ -5025,7 +5043,7 @@ ix86_expand_epilogue (int style)
97 pro_epilogue_adjust_stack (stack_pointer_rtx, stack_pointer_rtx,
98 GEN_INT (frame.to_allocate
99 + frame.nregs * UNITS_PER_WORD),
100- style);
101+ style, true);
102 /* If not an i386, mov & pop is faster than "leave". */
103 else if (TARGET_USE_LEAVE || optimize_size
104 || !cfun->machine->use_fast_prologue_epilogue)
105@@ -5034,7 +5052,7 @@ ix86_expand_epilogue (int style)
106 {
107 pro_epilogue_adjust_stack (stack_pointer_rtx,
108 hard_frame_pointer_rtx,
109- const0_rtx, style);
110+ const0_rtx, style, true);
111 if (TARGET_64BIT)
112 emit_insn (gen_popdi1 (hard_frame_pointer_rtx));
113 else
114@@ -5050,11 +5068,12 @@ ix86_expand_epilogue (int style)
115 gcc_assert (frame_pointer_needed);
116 pro_epilogue_adjust_stack (stack_pointer_rtx,
117 hard_frame_pointer_rtx,
118- GEN_INT (offset), style);
119+ GEN_INT (offset), style, true);
120 }
121 else if (frame.to_allocate)
122 pro_epilogue_adjust_stack (stack_pointer_rtx, stack_pointer_rtx,
123- GEN_INT (frame.to_allocate), style);
124+ GEN_INT (frame.to_allocate), style,
125+ true);
126
127 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
128 if (ix86_save_reg (regno, false))
129--- gcc/gcc/config/i386/i386.md 2005-10-18 17:26:40.000000000 -0700
130+++ gcc/gcc/config/i386/i386.md 2005-10-18 17:26:40.000000000 -0700
131@@ -19532,11 +19532,15 @@
132 (clobber (mem:BLK (scratch)))])])
133
134 ;; Convert esp subtractions to push.
135+;; This conversion is safe only under assumption that unallocated stack is
136+;; implicitly clobbered as specified by 32bit ABI (for signal handlers and such).
137+;; This is not valid with red zone, but we can work harder and enable the
138+;; optimization for functions that are not using it.
139 (define_peephole2
140 [(match_scratch:SI 0 "r")
141 (parallel [(set (reg:SI SP_REG) (plus:SI (reg:SI SP_REG) (const_int -4)))
142 (clobber (reg:CC FLAGS_REG))])]
143- "optimize_size || !TARGET_SUB_ESP_4"
144+ "(optimize_size || !TARGET_SUB_ESP_4) && !TARGET_RED_ZONE"
145 [(clobber (match_dup 0))
146 (set (mem:SI (pre_dec:SI (reg:SI SP_REG))) (match_dup 0))])
147
148@@ -19544,7 +19548,7 @@
149 [(match_scratch:SI 0 "r")
150 (parallel [(set (reg:SI SP_REG) (plus:SI (reg:SI SP_REG) (const_int -8)))
151 (clobber (reg:CC FLAGS_REG))])]
152- "optimize_size || !TARGET_SUB_ESP_8"
153+ "(optimize_size || !TARGET_SUB_ESP_8) && !TARGET_RED_ZONE"
154 [(clobber (match_dup 0))
155 (set (mem:SI (pre_dec:SI (reg:SI SP_REG))) (match_dup 0))
156 (set (mem:SI (pre_dec:SI (reg:SI SP_REG))) (match_dup 0))])
157@@ -19664,11 +19668,15 @@
158 (clobber (mem:BLK (scratch)))])])
159
160 ;; Convert esp subtractions to push.
161+;; This conversion is safe only under assumption that unallocated stack is
162+;; implicitly clobbered as specified by 32bit ABI (for signal handlers and such).
163+;; This is not valid with red zone, but we can work harder and enable the
164+;; optimization for functions that are not using it.
165 (define_peephole2
166 [(match_scratch:DI 0 "r")
167 (parallel [(set (reg:DI SP_REG) (plus:DI (reg:DI SP_REG) (const_int -8)))
168 (clobber (reg:CC FLAGS_REG))])]
169- "optimize_size || !TARGET_SUB_ESP_4"
170+ "(optimize_size || !TARGET_SUB_ESP_4) && !TARGET_RED_ZONE"
171 [(clobber (match_dup 0))
172 (set (mem:DI (pre_dec:DI (reg:DI SP_REG))) (match_dup 0))])
173
174@@ -19676,7 +19684,7 @@
175 [(match_scratch:DI 0 "r")
176 (parallel [(set (reg:DI SP_REG) (plus:DI (reg:DI SP_REG) (const_int -16)))
177 (clobber (reg:CC FLAGS_REG))])]
178- "optimize_size || !TARGET_SUB_ESP_8"
179+ "(optimize_size || !TARGET_SUB_ESP_8) && !TARGET_RED_ZONE"
180 [(clobber (match_dup 0))
181 (set (mem:DI (pre_dec:DI (reg:DI SP_REG))) (match_dup 0))
182 (set (mem:DI (pre_dec:DI (reg:DI SP_REG))) (match_dup 0))])
183
This page took 0.071937 seconds and 4 git commands to generate.