1 diff -Naurp gcc/config/avr/avr.c gcc/config/avr/avr.c
2 --- gcc/config/avr/avr.c 2011-10-27 17:00:24.000000000 +0530
3 +++ gcc/config/avr/avr.c 2011-10-27 17:47:15.000000000 +0530
4 @@ -274,8 +274,8 @@ avr_option_override (void)
5 avr_current_arch = &avr_arch_types[avr_current_device->arch];
6 avr_extra_arch_macro = avr_current_device->macro;
8 - tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
9 - zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
10 + tmp_reg_rtx = gen_rtx_REG (QImode, AVR_TINY ? TMP_REGNO_AVRTINY10 : TMP_REGNO);
11 + zero_reg_rtx = gen_rtx_REG (QImode, AVR_TINY ? ZERO_REGNO_AVRTINY10 : ZERO_REGNO);
13 init_machine_status = avr_init_machine_status;
15 @@ -1771,7 +1771,7 @@ avr_simplify_comparison_p (enum machine_
17 function_arg_regno_p(int r)
19 - return (r >= 8 && r <= 25);
20 + return (AVR_TINY ? r >= 20 && r <= 25 : r >= 8 && r <= 25);
23 /* Initializing the variable cum for the state at the beginning
24 @@ -1781,7 +1781,11 @@ void
25 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
26 tree fndecl ATTRIBUTE_UNUSED)
33 cum->regno = FIRST_CUM_REG;
34 if (!libname && stdarg_p (fntype))
36 @@ -1799,9 +1803,8 @@ avr_num_arg_regs (enum machine_mode mode
38 size = GET_MODE_SIZE (mode);
40 - /* Align all function arguments to start in even-numbered registers.
41 + /* If not AVR_TINY, align all function arguments to start in even-numbered registers.
42 Odd-sized arguments leave holes above them. */
44 return (size + 1) & ~1;
47 @@ -2133,10 +2136,20 @@ out_movqi_r_mr (rtx insn, rtx op[], int
48 fatal_insn ("incorrect insn:",insn);
50 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
51 - return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
52 + return *l = 3, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-63))) CR_TAB
53 + AS2 (sbci,r29,hi8(-(%o1-63))) CR_TAB
54 + AS2 (subi,r28,lo8(-63)) CR_TAB
55 + AS2 (sbci,r29,hi8(-63)) CR_TAB
56 + AS2 (ld,%0,Y) CR_TAB
57 + AS2 (subi,r28,lo8(63)) CR_TAB
58 + AS2 (sbci,r29,hi8(63)) CR_TAB
59 + AS2 (subi,r28,lo8(%o1-63)) CR_TAB
60 + AS2 (sbci,r29,hi8(%o1-63)))
61 + : (AS2 (adiw,r28,%o1-63) CR_TAB
62 AS2 (ldd,%0,Y+63) CR_TAB
63 AS2 (sbiw,r28,%o1-63));
66 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
67 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
69 @@ -2149,15 +2162,38 @@ out_movqi_r_mr (rtx insn, rtx op[], int
70 it but I have this situation with extremal optimizing options. */
71 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
72 || reg_unused_after (insn, XEXP (x,0)))
73 - return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
74 + return *l = 2, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
75 + AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
77 + : (AS2 (adiw,r26,%o1) CR_TAB
80 - return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
81 + return *l = 3, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
82 + AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
83 + AS2 (ld,%0,X) CR_TAB
84 + AS2 (subi,r26,lo8(%o1)) CR_TAB
85 + AS2 (sbci,r27,hi8(%o1)))
86 + : (AS2 (adiw,r26,%o1) CR_TAB
92 - return AS2 (ldd,%0,%1);
94 + if(REGNO(op[2]) == REG_Y)
95 + return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
96 + AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
97 + AS2 (ld,%0,Y) CR_TAB
98 + AS2 (subi,%A2,lo8(%o1)) CR_TAB
99 + AS2 (sbci,%B2,hi8(%o1)))
101 + if(REGNO(op[2]) == REG_Z)
102 + return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
103 + AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
104 + AS2 (ld,%0,Z) CR_TAB
105 + AS2 (subi,%A2,lo8(%o1)) CR_TAB
106 + AS2 (sbci,%B2,hi8(%o1)))
110 return AS2 (ld,%0,%1);
111 @@ -2197,14 +2233,34 @@ out_movhi_r_mr (rtx insn, rtx op[], int
115 - return (AS2 (ld,%A0,X+) CR_TAB
116 + return AVR_TINY ? (AS2 (ld,%A0,X+) CR_TAB
117 + AS2 (ld,%B0,X) CR_TAB
118 + AS2 (subi,r26,lo8(1)) CR_TAB
119 + AS2 (sbci,r27,hi8(1)))
120 + : (AS2 (ld,%A0,X+) CR_TAB
121 AS2 (ld,%B0,X) CR_TAB
127 - return (AS2 (ld,%A0,%1) CR_TAB
128 + if(reg_base == REG_Y)
129 + return AVR_TINY ? (AS2 (ld,%A0,%1) CR_TAB
130 + AS2 (subi,r28,lo8((-1))) CR_TAB
131 + AS2 (sbci,r29,hi8((-1))) CR_TAB
132 + AS2 (ld,%B0,%1) CR_TAB
133 + AS2 (subi,r28,lo8(1)) CR_TAB
134 + AS2 (sbci,r29,hi8(1)))
135 + : (AS2 (ld,%A0,%1) CR_TAB
136 + AS2 (ldd,%B0,%1+1));
137 + if(reg_base == REG_Z)
138 + return AVR_TINY ? (AS2 (ld,%A0,%1) CR_TAB
139 + AS2 (subi,r30,lo8((-1))) CR_TAB
140 + AS2 (sbci,r31,hi8((-1))) CR_TAB
141 + AS2 (ld,%B0,%1) CR_TAB
142 + AS2 (subi,r30,lo8(1)) CR_TAB
143 + AS2 (sbci,r31,hi8(1)))
144 + : (AS2 (ld,%A0,%1) CR_TAB
148 @@ -2219,12 +2275,30 @@ out_movhi_r_mr (rtx insn, rtx op[], int
149 fatal_insn ("incorrect insn:",insn);
151 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
152 - return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
153 + return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-62))) CR_TAB
154 + AS2 (sbci,r29,hi8(-(%o1-62))) CR_TAB
155 + AS2 (subi,r28,lo8(-62)) CR_TAB
156 + AS2 (sbci,r29,hi8(-62)) CR_TAB
157 + AS2 (ld,%A0,Y+) CR_TAB
158 + AS2 (ld,%B0,Y) CR_TAB
159 + AS2 (subi,r28,lo8(63)) CR_TAB
160 + AS2 (sbci,r29,hi8(63)) CR_TAB
161 + AS2 (subi,r28,lo8(%o1-62)) CR_TAB
162 + AS2 (sbci,r29,hi8(%o1-62)))
163 + : (AS2 (adiw,r28,%o1-62) CR_TAB
164 AS2 (ldd,%A0,Y+62) CR_TAB
165 AS2 (ldd,%B0,Y+63) CR_TAB
166 AS2 (sbiw,r28,%o1-62));
168 - return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
169 + return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o1)) CR_TAB
170 + AS2 (sbci,r29,hi8(-%o1)) CR_TAB
171 + AS2 (ld,%A0,Y+) CR_TAB
172 + AS2 (ld,%B0,Y) CR_TAB
173 + AS2 (subi,r28,lo8(1)) CR_TAB
174 + AS2 (sbci,r29,hi8(1)) CR_TAB
175 + AS2 (subi,r28,lo8(%o1)) CR_TAB
176 + AS2 (sbci,r29,hi8(%o1)))
177 + : (AS2 (subi,r28,lo8(-%o1)) CR_TAB
178 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
179 AS2 (ld,%A0,Y) CR_TAB
180 AS2 (ldd,%B0,Y+1) CR_TAB
181 @@ -2239,12 +2313,23 @@ out_movhi_r_mr (rtx insn, rtx op[], int
184 if (reg_base == reg_dest)
185 - return (AS2 (adiw,r26,%o1) CR_TAB
186 + return AVR_TINY ? (AS2 (subi,r26,lo8(-%o1)) CR_TAB
187 + AS2 (sbci,r27,hi8(-%o1)) CR_TAB
188 + AS2 (ld,__tmp_reg__,X+) CR_TAB
189 + AS2 (ld,%B0,X) CR_TAB
190 + AS2 (mov,%A0,__tmp_reg__))
191 + : (AS2 (adiw,r26,%o1) CR_TAB
192 AS2 (ld,__tmp_reg__,X+) CR_TAB
193 AS2 (ld,%B0,X) CR_TAB
194 AS2 (mov,%A0,__tmp_reg__));
196 - return (AS2 (adiw,r26,%o1) CR_TAB
197 + return AVR_TINY ? (AS2 (subi,r26,lo8(-%o1)) CR_TAB
198 + AS2 (sbci,r27,hi8(-%o1)) CR_TAB
199 + AS2 (ld,%A0,X+) CR_TAB
200 + AS2 (ld,%B0,X) CR_TAB
201 + AS2 (subi,r26,lo8(%o1+1)) CR_TAB
202 + AS2 (sbci,r27,hi8(%o1+1)))
203 + : (AS2 (adiw,r26,%o1) CR_TAB
204 AS2 (ld,%A0,X+) CR_TAB
205 AS2 (ld,%B0,X) CR_TAB
206 AS2 (sbiw,r26,%o1+1));
207 @@ -2253,14 +2338,54 @@ out_movhi_r_mr (rtx insn, rtx op[], int
208 if (reg_base == reg_dest)
211 - return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
212 + op[2] = XEXP(base, 0);
214 + if(REGNO(op[2]) == REG_Y)
215 + return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
216 + AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
217 + AS2 (ld,__tmp_reg__,Y+) CR_TAB
218 + AS2 (ld,%B0,Y) CR_TAB
219 + AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
220 + AS2 (sbci,%B2,hi8(%o1+1)) CR_TAB
221 + AS2 (mov,%A0,__tmp_reg__))
222 + : (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
223 + AS2 (ldd,%B0,%B1) CR_TAB
224 + AS2 (mov,%A0,__tmp_reg__));
225 + if(REGNO(op[2]) == REG_Z)
226 + return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
227 + AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
228 + AS2 (ld,__tmp_reg__,Z+) CR_TAB
229 + AS2 (ld,%B0,Z) CR_TAB
230 + AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
231 + AS2 (sbci,%B2,hi8(%o1+1)) CR_TAB
232 + AS2 (mov,%A0,__tmp_reg__))
233 + : (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
234 AS2 (ldd,%B0,%B1) CR_TAB
235 AS2 (mov,%A0,__tmp_reg__));
239 - return (AS2 (ldd,%A0,%A1) CR_TAB
241 + op[2] = XEXP(base, 0);
243 + if(REGNO(op[2]) == REG_Y)
244 + return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
245 + AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
246 + AS2 (ld,%A0,Y+) CR_TAB
247 + AS2 (ld,%B0,Y) CR_TAB
248 + AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
249 + AS2 (sbci,%B2,hi8(%o1+1)))
250 + : (AS2 (ldd,%A0,%A1) CR_TAB
251 + AS2 (ldd,%B0,%B1));
252 + if(REGNO(op[2]) == REG_Z)
253 + return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
254 + AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
255 + AS2 (ld,%A0,Z+) CR_TAB
256 + AS2 (ld,%B0,Z) CR_TAB
257 + AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
258 + AS2 (sbci,%B2,hi8(%o1+1)))
259 + : (AS2 (ldd,%A0,%A1) CR_TAB
263 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
265 @@ -2272,7 +2397,13 @@ out_movhi_r_mr (rtx insn, rtx op[], int
266 if (REGNO (XEXP (base, 0)) == REG_X)
269 - return (AS2 (sbiw,r26,2) CR_TAB
270 + return AVR_TINY ? (AS2 (subi,r26,lo8(2)) CR_TAB
271 + AS2 (sbci,r27,hi8(2)) CR_TAB
272 + AS2 (ld,%A0,X+) CR_TAB
273 + AS2 (ld,%B0,X) CR_TAB
274 + AS2 (subi,r26,lo8(1)) CR_TAB
275 + AS2 (sbci,r27,hi8(1)))
276 + : (AS2 (sbiw,r26,2) CR_TAB
277 AS2 (ld,%A0,X+) CR_TAB
278 AS2 (ld,%B0,X) CR_TAB
280 @@ -2280,7 +2411,16 @@ out_movhi_r_mr (rtx insn, rtx op[], int
284 - return (AS2 (sbiw,%r1,2) CR_TAB
285 + //FIXME:check the code once again for AVR_TINY
286 + return AVR_TINY ? (AS2 (subi,%A1,lo8(2)) CR_TAB
287 + AS2 (sbci,%B1,hi8(2)) CR_TAB
288 + AS2 (ld,%A0,%p1) CR_TAB
289 + AS2 (subi,%A1,lo8(-1)) CR_TAB
290 + AS2 (sbci,%B1,hi8(-1)) CR_TAB
291 + AS2 (ld,%B0,%p1) CR_TAB
292 + AS2 (subi,%A1,lo8(1)) CR_TAB
293 + AS2 (sbci,%B1,hi8(1)))
294 + : (AS2 (sbiw,%r1,2) CR_TAB
295 AS2 (ld,%A0,%p1) CR_TAB
296 AS2 (ldd,%B0,%p1+1));
298 @@ -2336,13 +2476,23 @@ out_movsi_r_mr (rtx insn, rtx op[], int
300 if (reg_dest == REG_X)
301 /* "ld r26,-X" is undefined */
302 - return *l=7, (AS2 (adiw,r26,3) CR_TAB
303 + return *l=7, AVR_TINY ? (AS2 (subi,r26,lo8(-3)) CR_TAB
304 + AS2 (sbci,r27,hi8(-3)) CR_TAB
305 + AS2 (ld,r29,X) CR_TAB
306 + AS2 (ld,r28,-X) CR_TAB
307 + AS2 (ld,__tmp_reg__,-X) CR_TAB
308 + AS2 (subi,r26,lo8(1)) CR_TAB
309 + AS2 (sbci,r27,hi8(1)) CR_TAB
310 + AS2 (ld,r26,X) CR_TAB
311 + AS2 (mov,r27,__tmp_reg__))
312 + : (AS2 (adiw,r26,3) CR_TAB
313 AS2 (ld,r29,X) CR_TAB
314 AS2 (ld,r28,-X) CR_TAB
315 AS2 (ld,__tmp_reg__,-X) CR_TAB
316 AS2 (sbiw,r26,1) CR_TAB
317 AS2 (ld,r26,X) CR_TAB
318 AS2 (mov,r27,__tmp_reg__));
320 else if (reg_dest == REG_X - 2)
321 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
322 AS2 (ld,%B0,X+) CR_TAB
323 @@ -2355,7 +2505,13 @@ out_movsi_r_mr (rtx insn, rtx op[], int
324 AS2 (ld,%C0,X+) CR_TAB
327 - return *l=5, (AS2 (ld,%A0,X+) CR_TAB
328 + return *l=5, AVR_TINY ? (AS2 (ld,%A0,X+) CR_TAB
329 + AS2 (ld,%B0,X+) CR_TAB
330 + AS2 (ld,%C0,X+) CR_TAB
331 + AS2 (ld,%D0,X) CR_TAB
332 + AS2 (subi,r26,lo8(3)) CR_TAB
333 + AS2 (sbci,r27,hi8(3)))
334 + : (AS2 (ld,%A0,X+) CR_TAB
335 AS2 (ld,%B0,X+) CR_TAB
336 AS2 (ld,%C0,X+) CR_TAB
337 AS2 (ld,%D0,X) CR_TAB
338 @@ -2364,22 +2520,97 @@ out_movsi_r_mr (rtx insn, rtx op[], int
341 if (reg_dest == reg_base)
342 - return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
344 + if(reg_base == REG_Y)
345 + return *l=5, AVR_TINY ? (AS2 (subi,r28,lo8(-3)) CR_TAB
346 + AS2 (sbci,r29,hi8(-3)) CR_TAB
347 + AS2 (ld,%D0,Y) CR_TAB
348 + AS2 (ld,%C0,-Y) CR_TAB
349 + AS2 (subi,r28,lo8(1)) CR_TAB
350 + AS2 (sbci,r29,hi8(1)) CR_TAB
351 + AS2 (ld,__tmp_reg__,%1) CR_TAB
352 + AS2 (subi,r28,lo8(1)) CR_TAB
353 + AS2 (sbci,r29,hi8(1)) CR_TAB
354 + AS2 (ld,%A0,%1) CR_TAB
355 + AS2 (mov,%B0,__tmp_reg__))
356 + : (AS2 (ldd,%D0,%1+3) CR_TAB
357 + AS2 (ldd,%C0,%1+2) CR_TAB
358 + AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
359 + AS2 (ld,%A0,%1) CR_TAB
360 + AS2 (mov,%B0,__tmp_reg__));
361 + if(reg_base == REG_Z)
362 + return *l=5, AVR_TINY ? (AS2 (subi,r30,lo8(-3)) CR_TAB
363 + AS2 (sbci,r31,hi8(-3)) CR_TAB
364 + AS2 (ld,%D0,Z) CR_TAB
365 + AS2 (ld,%C0,-Z) CR_TAB
366 + AS2 (subi,r30,lo8(1)) CR_TAB
367 + AS2 (sbci,r31,hi8(1)) CR_TAB
368 + AS2 (ld,__tmp_reg__,%1) CR_TAB
369 + AS2 (subi,r30,lo8(1)) CR_TAB
370 + AS2 (sbci,r31,hi8(1)) CR_TAB
371 + AS2 (ld,%A0,%1) CR_TAB
372 + AS2 (mov,%B0,__tmp_reg__))
373 + : (AS2 (ldd,%D0,%1+3) CR_TAB
374 AS2 (ldd,%C0,%1+2) CR_TAB
375 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
376 AS2 (ld,%A0,%1) CR_TAB
377 AS2 (mov,%B0,__tmp_reg__));
380 else if (reg_base == reg_dest + 2)
381 - return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
383 + if(reg_base == REG_Y)
384 + return *l=5, AVR_TINY ? (AS2 (ld ,%A0,Y+) CR_TAB
385 + AS2 (ld,%B0,Y+) CR_TAB
386 + AS2 (ld,__tmp_reg__,Y+) CR_TAB
387 + AS2 (ld,%D0,Y) CR_TAB
388 + AS2 (subi,r28,lo8(3)) CR_TAB
389 + AS2 (sbci,r29,hi8(3)) CR_TAB
390 + AS2 (mov,%C0,__tmp_reg__))
391 + : (AS2 (ld ,%A0,%1) CR_TAB
392 + AS2 (ldd,%B0,%1+1) CR_TAB
393 + AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
394 + AS2 (ldd,%D0,%1+3) CR_TAB
395 + AS2 (mov,%C0,__tmp_reg__));
396 + if(reg_base == REG_Z)
397 + return *l=5, AVR_TINY ? (AS2 (ld ,%A0,Z+) CR_TAB
398 + AS2 (ld,%B0,Z+) CR_TAB
399 + AS2 (ld,__tmp_reg__,Z+) CR_TAB
400 + AS2 (ld,%D0,Z) CR_TAB
401 + AS2 (subi,r30,lo8(3)) CR_TAB
402 + AS2 (sbci,r31,hi8(3)) CR_TAB
403 + AS2 (mov,%C0,__tmp_reg__))
404 + : (AS2 (ld ,%A0,%1) CR_TAB
405 AS2 (ldd,%B0,%1+1) CR_TAB
406 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
407 AS2 (ldd,%D0,%1+3) CR_TAB
408 AS2 (mov,%C0,__tmp_reg__));
411 - return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
413 + if(reg_base == REG_Y)
414 + return *l=4, AVR_TINY ? (AS2 (ld ,%A0,Y+) CR_TAB
415 + AS2 (ld,%B0,Y+) CR_TAB
416 + AS2 (ld,%C0,Y+) CR_TAB
417 + AS2 (ld,%D0,Y) CR_TAB
418 + AS2 (subi,r28,lo8(3)) CR_TAB
419 + AS2 (sbci,r29,hi8(3)))
420 + : (AS2 (ld ,%A0,%1) CR_TAB
421 AS2 (ldd,%B0,%1+1) CR_TAB
422 AS2 (ldd,%C0,%1+2) CR_TAB
424 + if(reg_base == REG_Z)
425 + return *l=4, AVR_TINY ? (AS2 (ld ,%A0,Z+) CR_TAB
426 + AS2 (ld,%B0,Z+) CR_TAB
427 + AS2 (ld,%C0,Z+) CR_TAB
428 + AS2 (ld,%D0,Z) CR_TAB
429 + AS2 (subi,r30,lo8(3)) CR_TAB
430 + AS2 (sbci,r31,hi8(3)))
431 + : (AS2 (ld ,%A0,%1) CR_TAB
432 + AS2 (ldd,%B0,%1+1) CR_TAB
433 + AS2 (ldd,%C0,%1+2) CR_TAB
434 + AS2 (ldd,%D0,%1+3));
438 else if (GET_CODE (base) == PLUS) /* (R + i) */
439 @@ -2392,14 +2623,36 @@ out_movsi_r_mr (rtx insn, rtx op[], int
440 fatal_insn ("incorrect insn:",insn);
442 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
443 - return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
444 + return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-60))) CR_TAB
445 + AS2 (sbci,r29,hi8(-(%o1-60))) CR_TAB
446 + AS2 (subi,r28,lo8(-60)) CR_TAB
447 + AS2 (sbci,r29,hi8(-60)) CR_TAB
448 + AS2 (ld,%A0,Y+) CR_TAB
449 + AS2 (ld,%B0,Y+) CR_TAB
450 + AS2 (ld,%C0,Y+) CR_TAB
451 + AS2 (ld,%D0,Y) CR_TAB
452 + AS2 (subi,r28,lo8(63)) CR_TAB
453 + AS2 (sbci,r29,hi8(63)) CR_TAB
454 + AS2 (subi,r28,lo8(%o1-60)) CR_TAB
455 + AS2 (sbci,r29,hi8(%o1-60)))
456 + : (AS2 (adiw,r28,%o1-60) CR_TAB
457 AS2 (ldd,%A0,Y+60) CR_TAB
458 AS2 (ldd,%B0,Y+61) CR_TAB
459 AS2 (ldd,%C0,Y+62) CR_TAB
460 AS2 (ldd,%D0,Y+63) CR_TAB
461 AS2 (sbiw,r28,%o1-60));
463 - return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
464 + return *l = 8, AVR_TINY ? (AS2 (subi,r28,lo8(-%o1)) CR_TAB
465 + AS2 (sbci,r29,hi8(-%o1)) CR_TAB
466 + AS2 (ld,%A0,Y+) CR_TAB
467 + AS2 (ld,%B0,Y+) CR_TAB
468 + AS2 (ld,%C0,Y+) CR_TAB
469 + AS2 (ld,%D0,Y) CR_TAB
470 + AS2 (subi,r28,lo8(3)) CR_TAB
471 + AS2 (sbci,r29,hi8(3)) CR_TAB
472 + AS2 (subi,r28,lo8(%o1)) CR_TAB
473 + AS2 (sbci,r29,hi8(%o1)))
474 + : (AS2 (subi,r28,lo8(-%o1)) CR_TAB
475 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
476 AS2 (ld,%A0,Y) CR_TAB
477 AS2 (ldd,%B0,Y+1) CR_TAB
478 @@ -2417,7 +2670,16 @@ out_movsi_r_mr (rtx insn, rtx op[], int
481 /* "ld r26,-X" is undefined */
482 - return (AS2 (adiw,r26,%o1+3) CR_TAB
483 + return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1+3))) CR_TAB
484 + AS2 (sbci,r27,hi8(-(%o1+3))) CR_TAB
485 + AS2 (ld,r29,X) CR_TAB
486 + AS2 (ld,r28,-X) CR_TAB
487 + AS2 (ld,__tmp_reg__,-X) CR_TAB
488 + AS2 (subi,r26,lo8(1)) CR_TAB
489 + AS2 (sbci,r27,hi8(1)) CR_TAB
490 + AS2 (ld,r26,X) CR_TAB
491 + AS2 (mov,r27,__tmp_reg__))
492 + : (AS2 (adiw,r26,%o1+3) CR_TAB
493 AS2 (ld,r29,X) CR_TAB
494 AS2 (ld,r28,-X) CR_TAB
495 AS2 (ld,__tmp_reg__,-X) CR_TAB
496 @@ -2427,14 +2689,29 @@ out_movsi_r_mr (rtx insn, rtx op[], int
499 if (reg_dest == REG_X - 2)
500 - return (AS2 (adiw,r26,%o1) CR_TAB
501 + return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
502 + AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
503 + AS2 (ld,r24,X+) CR_TAB
504 + AS2 (ld,r25,X+) CR_TAB
505 + AS2 (ld,__tmp_reg__,X+) CR_TAB
506 + AS2 (ld,r27,X) CR_TAB
507 + AS2 (mov,r26,__tmp_reg__))
508 + : (AS2 (adiw,r26,%o1) CR_TAB
509 AS2 (ld,r24,X+) CR_TAB
510 AS2 (ld,r25,X+) CR_TAB
511 AS2 (ld,__tmp_reg__,X+) CR_TAB
512 AS2 (ld,r27,X) CR_TAB
513 AS2 (mov,r26,__tmp_reg__));
515 - return (AS2 (adiw,r26,%o1) CR_TAB
516 + return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
517 + AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
518 + AS2 (ld,%A0,X+) CR_TAB
519 + AS2 (ld,%B0,X+) CR_TAB
520 + AS2 (ld,%C0,X+) CR_TAB
521 + AS2 (ld,%D0,X) CR_TAB
522 + AS2 (subi,r26,lo8(%o1+3)) CR_TAB
523 + AS2 (sbci,r27,hi8(%o1+3)))
524 + : (AS2 (adiw,r26,%o1) CR_TAB
525 AS2 (ld,%A0,X+) CR_TAB
526 AS2 (ld,%B0,X+) CR_TAB
527 AS2 (ld,%C0,X+) CR_TAB
528 @@ -2442,18 +2719,99 @@ out_movsi_r_mr (rtx insn, rtx op[], int
529 AS2 (sbiw,r26,%o1+3));
531 if (reg_dest == reg_base)
532 - return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
534 + op[2] = XEXP(base, 0);
536 + if(REGNO(op[2]) == REG_Y)
537 + return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1+4))) CR_TAB
538 + AS2 (sbci,%B2,hi8(-(%o1+4))) CR_TAB
539 + AS2 (ld,%D0,-Y) CR_TAB
540 + AS2 (ld,%C0,-Y) CR_TAB
541 + AS2 (ld,__tmp_reg__,-Y) CR_TAB
542 + AS2 (ld,%A0,-Y) CR_TAB
543 + AS2 (subi,%A2,lo8(%o1)) CR_TAB
544 + AS2 (sbci,%B2,hi8(%o1)) CR_TAB
545 + AS2 (mov,%B0,__tmp_reg__))
546 + : (AS2 (ldd,%D0,%D1) CR_TAB
547 + AS2 (ldd,%C0,%C1) CR_TAB
548 + AS2 (ldd,__tmp_reg__,%B1) CR_TAB
549 + AS2 (ldd,%A0,%A1) CR_TAB
550 + AS2 (mov,%B0,__tmp_reg__));
551 + if(REGNO(op[2]) == REG_Z)
552 + return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1+4))) CR_TAB
553 + AS2 (sbci,%B2,hi8(-(%o1+4))) CR_TAB
554 + AS2 (ld,%D0,-Z) CR_TAB
555 + AS2 (ld,%C0,-Z) CR_TAB
556 + AS2 (ld,__tmp_reg__,-Z) CR_TAB
557 + AS2 (ld,%A0,-Z) CR_TAB
558 + AS2 (subi,%A2,lo8(%o1)) CR_TAB
559 + AS2 (sbci,%B2,hi8(%o1)) CR_TAB
560 + AS2 (mov,%B0,__tmp_reg__))
561 + : (AS2 (ldd,%D0,%D1) CR_TAB
562 AS2 (ldd,%C0,%C1) CR_TAB
563 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
564 AS2 (ldd,%A0,%A1) CR_TAB
565 AS2 (mov,%B0,__tmp_reg__));
567 else if (reg_dest == reg_base - 2)
568 - return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
570 + op[2] = XEXP(base, 0);
572 + if(REGNO(op[2]) == REG_Y)
573 + return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
574 + AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
575 + AS2 (ld,%A0,Y+) CR_TAB
576 + AS2 (ld,%B0,Y+) CR_TAB
577 + AS2 (ld,__tmp_reg__,Y+) CR_TAB
578 + AS2 (ld,%D0,Y) CR_TAB
579 + AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
580 + AS2 (sbci,%B2,hi8(%o1+3)) CR_TAB
581 + AS2 (mov,%C0,__tmp_reg__))
582 + : (AS2 (ldd,%A0,%A1) CR_TAB
583 AS2 (ldd,%B0,%B1) CR_TAB
584 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
585 AS2 (ldd,%D0,%D1) CR_TAB
586 AS2 (mov,%C0,__tmp_reg__));
587 - return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
588 + if(REGNO(op[2]) == REG_Z)
589 + return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
590 + AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
591 + AS2 (ld,%A0,Z+) CR_TAB
592 + AS2 (ld,%B0,Z+) CR_TAB
593 + AS2 (ld,__tmp_reg__,Z+) CR_TAB
594 + AS2 (ld,%D0,Z) CR_TAB
595 + AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
596 + AS2 (sbci,%B2,hi8(%o1+3)) CR_TAB
597 + AS2 (mov,%C0,__tmp_reg__))
598 + : (AS2 (ldd,%A0,%A1) CR_TAB
599 + AS2 (ldd,%B0,%B1) CR_TAB
600 + AS2 (ldd,__tmp_reg__,%C1) CR_TAB
601 + AS2 (ldd,%D0,%D1) CR_TAB
602 + AS2 (mov,%C0,__tmp_reg__));
604 + op[2] = XEXP(base, 0);
605 + if(REGNO(op[2]) == REG_Y)
606 + return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
607 + AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
608 + AS2 (ld,%A0,Y+) CR_TAB
609 + AS2 (ld,%B0,Y+) CR_TAB
610 + AS2 (ld,%C0,Y+) CR_TAB
611 + AS2 (ld,%D0,Y) CR_TAB
612 + AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
613 + AS2 (sbci,%B2,hi8(%o1+3)))
614 + : (AS2 (ldd,%A0,%A1) CR_TAB
615 + AS2 (ldd,%B0,%B1) CR_TAB
616 + AS2 (ldd,%C0,%C1) CR_TAB
617 + AS2 (ldd,%D0,%D1));
618 + if(REGNO(op[2]) == REG_Z)
619 + return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
620 + AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
621 + AS2 (ld,%A0,Z+) CR_TAB
622 + AS2 (ld,%B0,Z+) CR_TAB
623 + AS2 (ld,%C0,Z+) CR_TAB
624 + AS2 (ld,%D0,Z) CR_TAB
625 + AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
626 + AS2 (sbci,%B2,hi8(%o1+3)))
627 + : (AS2 (ldd,%A0,%A1) CR_TAB
628 AS2 (ldd,%B0,%B1) CR_TAB
629 AS2 (ldd,%C0,%C1) CR_TAB
631 @@ -2504,14 +2862,30 @@ out_movsi_mr_r (rtx insn, rtx op[], int
633 /* "st X+,r26" is undefined */
634 if (reg_unused_after (insn, base))
635 - return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
636 + return *l=6, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
637 + AS2 (st,X,r26) CR_TAB
638 + AS2 (subi,r26,lo8(-1)) CR_TAB
639 + AS2 (sbci,r27,hi8(-1)) CR_TAB
640 + AS2 (st,X+,__tmp_reg__) CR_TAB
641 + AS2 (st,X+,r28) CR_TAB
643 + : (AS2 (mov,__tmp_reg__,r27) CR_TAB
644 AS2 (st,X,r26) CR_TAB
645 AS2 (adiw,r26,1) CR_TAB
646 AS2 (st,X+,__tmp_reg__) CR_TAB
647 AS2 (st,X+,r28) CR_TAB
650 - return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
651 + return *l=7, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
652 + AS2 (st,X,r26) CR_TAB
653 + AS2 (subi,r26,lo8(-1)) CR_TAB
654 + AS2 (sbci,r27,hi8(-1)) CR_TAB
655 + AS2 (st,X+,__tmp_reg__) CR_TAB
656 + AS2 (st,X+,r28) CR_TAB
657 + AS2 (st,X,r29) CR_TAB
658 + AS2 (subi,r26,lo8(3)) CR_TAB
659 + AS2 (sbci,r27,hi8(3)))
660 + : (AS2 (mov,__tmp_reg__,r27) CR_TAB
661 AS2 (st,X,r26) CR_TAB
662 AS2 (adiw,r26,1) CR_TAB
663 AS2 (st,X+,__tmp_reg__) CR_TAB
664 @@ -2530,7 +2904,16 @@ out_movsi_mr_r (rtx insn, rtx op[], int
665 AS2 (st,%0,__tmp_reg__) CR_TAB
666 AS1 (clr,__zero_reg__));
668 - return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
669 + return *l=8, AVR_TINY ? (AS2 (mov,__zero_reg__,%C1) CR_TAB
670 + AS2 (mov,__tmp_reg__,%D1) CR_TAB
671 + AS2 (st,%0+,%A1) CR_TAB
672 + AS2 (st,%0+,%B1) CR_TAB
673 + AS2 (st,%0+,__zero_reg__) CR_TAB
674 + AS2 (st,%0,__tmp_reg__) CR_TAB
675 + AS1 (clr,__zero_reg__) CR_TAB
676 + AS2 (subi,r26,lo8(3)) CR_TAB
677 + AS2 (sbci,r27,hi8(3)))
678 + : (AS2 (mov,__zero_reg__,%C1) CR_TAB
679 AS2 (mov,__tmp_reg__,%D1) CR_TAB
680 AS2 (st,%0+,%A1) CR_TAB
681 AS2 (st,%0+,%B1) CR_TAB
682 @@ -2539,18 +2922,44 @@ out_movsi_mr_r (rtx insn, rtx op[], int
683 AS1 (clr,__zero_reg__) CR_TAB
686 - return *l=5, (AS2 (st,%0+,%A1) CR_TAB
687 + return *l=5, AVR_TINY ? (AS2 (st,%0+,%A1) CR_TAB
688 + AS2 (st,%0+,%B1) CR_TAB
689 + AS2 (st,%0+,%C1) CR_TAB
690 + AS2 (st,%0,%D1) CR_TAB
691 + AS2 (subi,r26,lo8(3)) CR_TAB
692 + AS2 (sbci,r27,hi8(3)))
693 + : (AS2 (st,%0+,%A1) CR_TAB
694 AS2 (st,%0+,%B1) CR_TAB
695 AS2 (st,%0+,%C1) CR_TAB
696 AS2 (st,%0,%D1) CR_TAB
700 - return *l=4, (AS2 (st,%0,%A1) CR_TAB
702 + if(reg_base == REG_Y)
703 + return *l=4, AVR_TINY ? (AS2 (st,Y+,%A1) CR_TAB
704 + AS2 (st,Y+,%B1) CR_TAB
705 + AS2 (st,Y+,%C1) CR_TAB
706 + AS2 (st,Y,%D1) CR_TAB
707 + AS2 (subi,r28,lo8(3)) CR_TAB
708 + AS2 (sbci,r29,hi8(3)))
709 + : (AS2 (st,%0,%A1) CR_TAB
710 + AS2 (std,%0+1,%B1) CR_TAB
711 + AS2 (std,%0+2,%C1) CR_TAB
712 + AS2 (std,%0+3,%D1));
713 + if(reg_base == REG_Z)
714 + return *l=4, AVR_TINY ? (AS2 (st,Z+,%A1) CR_TAB
715 + AS2 (st,Z+,%B1) CR_TAB
716 + AS2 (st,Z+,%C1) CR_TAB
717 + AS2 (st,Z,%D1) CR_TAB
718 + AS2 (subi,r30,lo8(3)) CR_TAB
719 + AS2 (sbci,r31,hi8(3)))
720 + : (AS2 (st,%0,%A1) CR_TAB
721 AS2 (std,%0+1,%B1) CR_TAB
722 AS2 (std,%0+2,%C1) CR_TAB
726 else if (GET_CODE (base) == PLUS) /* (R + i) */
728 int disp = INTVAL (XEXP (base, 1));
729 @@ -2561,14 +2970,35 @@ out_movsi_mr_r (rtx insn, rtx op[], int
730 fatal_insn ("incorrect insn:",insn);
732 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
733 - return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
734 + return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-60))) CR_TAB
735 + AS2 (sbci,r29,hi8(-(%o0-60))) CR_TAB
736 + AS2 (subi,r28,lo8(-60)) CR_TAB
737 + AS2 (sbci,r29,hi8(-60)) CR_TAB
738 + AS2 (st,Y+,%A1) CR_TAB
739 + AS2 (st,Y+,%B1) CR_TAB
740 + AS2 (st,Y+,%C1) CR_TAB
741 + AS2 (st,Y,%D1) CR_TAB
742 + AS2 (subi,r28,lo8(63)) CR_TAB
743 + AS2 (sbci,r29,hi8(63)) CR_TAB
744 + AS2 (subi,r28,lo8(%o0-60)) CR_TAB
745 + AS2 (sbci,r29,hi8(%o0-60)))
746 + : (AS2 (adiw,r28,%o0-60) CR_TAB
747 AS2 (std,Y+60,%A1) CR_TAB
748 AS2 (std,Y+61,%B1) CR_TAB
749 AS2 (std,Y+62,%C1) CR_TAB
750 AS2 (std,Y+63,%D1) CR_TAB
751 AS2 (sbiw,r28,%o0-60));
753 - return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
754 + return *l = 8, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB
755 + AS2 (sbci,r29,hi8(-%o0)) CR_TAB
756 + AS2 (st,Y+,%A1) CR_TAB
757 + AS2 (st,Y+,%B1) CR_TAB
758 + AS2 (st,Y+,%C1) CR_TAB
759 + AS2 (st,Y,%D1) CR_TAB
760 + AS2 (subi,r28,lo8(3)) CR_TAB
761 + AS2 (sbci,r29,hi8(3)) CR_TAB
762 + AS2 (subi,r28,lo8(%o0)) CR_TAB
763 + AS2 (sbci,r29,hi8(%o0)))
764 + : (AS2 (subi,r28,lo8(-%o0)) CR_TAB
765 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
766 AS2 (st,Y,%A1) CR_TAB
767 AS2 (std,Y+1,%B1) CR_TAB
768 @@ -2583,7 +3013,18 @@ out_movsi_mr_r (rtx insn, rtx op[], int
769 if (reg_src == REG_X)
772 - return (AS2 (mov,__tmp_reg__,r26) CR_TAB
773 + return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB
774 + AS2 (mov,__zero_reg__,r27) CR_TAB
775 + AS2 (subi,r26,lo8(-(%o0))) CR_TAB
776 + AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
777 + AS2 (st,X+,__tmp_reg__) CR_TAB
778 + AS2 (st,X+,__zero_reg__) CR_TAB
779 + AS2 (st,X+,r28) CR_TAB
780 + AS2 (st,X,r29) CR_TAB
781 + AS1 (clr,__zero_reg__) CR_TAB
782 + AS2 (subi,r26,lo8(%o0+3)) CR_TAB
783 + AS2 (sbci,r27,hi8(%o0+3)))
784 + : (AS2 (mov,__tmp_reg__,r26) CR_TAB
785 AS2 (mov,__zero_reg__,r27) CR_TAB
786 AS2 (adiw,r26,%o0) CR_TAB
787 AS2 (st,X+,__tmp_reg__) CR_TAB
788 @@ -2596,7 +3037,18 @@ out_movsi_mr_r (rtx insn, rtx op[], int
789 else if (reg_src == REG_X - 2)
792 - return (AS2 (mov,__tmp_reg__,r26) CR_TAB
793 + return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB
794 + AS2 (mov,__zero_reg__,r27) CR_TAB
795 + AS2 (subi,r26,lo8(-(%o0))) CR_TAB
796 + AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
797 + AS2 (st,X+,r24) CR_TAB
798 + AS2 (st,X+,r25) CR_TAB
799 + AS2 (st,X+,__tmp_reg__) CR_TAB
800 + AS2 (st,X,__zero_reg__) CR_TAB
801 + AS1 (clr,__zero_reg__) CR_TAB
802 + AS2 (subi,r26,lo8(%o0+3)) CR_TAB
803 + AS2 (sbci,r27,hi8(%o0+3)))
804 + : (AS2 (mov,__tmp_reg__,r26) CR_TAB
805 AS2 (mov,__zero_reg__,r27) CR_TAB
806 AS2 (adiw,r26,%o0) CR_TAB
807 AS2 (st,X+,r24) CR_TAB
808 @@ -2607,14 +3059,46 @@ out_movsi_mr_r (rtx insn, rtx op[], int
809 AS2 (sbiw,r26,%o0+3));
812 - return (AS2 (adiw,r26,%o0) CR_TAB
813 + return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
814 + AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
815 + AS2 (st,X+,%A1) CR_TAB
816 + AS2 (st,X+,%B1) CR_TAB
817 + AS2 (st,X+,%C1) CR_TAB
818 + AS2 (st,X,%D1) CR_TAB
819 + AS2 (subi,r26,lo8(%o0+3)) CR_TAB
820 + AS2 (sbci,r27,hi8(%o0+3)))
821 + : (AS2 (adiw,r26,%o0) CR_TAB
822 AS2 (st,X+,%A1) CR_TAB
823 AS2 (st,X+,%B1) CR_TAB
824 AS2 (st,X+,%C1) CR_TAB
825 AS2 (st,X,%D1) CR_TAB
826 AS2 (sbiw,r26,%o0+3));
828 - return *l=4, (AS2 (std,%A0,%A1) CR_TAB
829 + op[2] = XEXP(base, 0);
830 + if(REGNO(op[2]) == REG_Y)
831 + return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
832 + AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
833 + AS2 (st,Y+,%A1) CR_TAB
834 + AS2 (st,Y+,%B1) CR_TAB
835 + AS2 (st,Y+,%C1) CR_TAB
836 + AS2 (st,Y,%D1) CR_TAB
837 + AS2 (subi,%A2,lo8(%o0+3)) CR_TAB
838 + AS2 (sbci,%B2,hi8(%o0+3)))
839 + : (AS2 (std,%A0,%A1) CR_TAB
840 + AS2 (std,%B0,%B1) CR_TAB
841 + AS2 (std,%C0,%C1) CR_TAB
842 + AS2 (std,%D0,%D1));
844 + if(REGNO(op[2]) == REG_Z)
845 + return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
846 + AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
847 + AS2 (st,Z+,%A1) CR_TAB
848 + AS2 (st,Z+,%B1) CR_TAB
849 + AS2 (st,Z+,%C1) CR_TAB
850 + AS2 (st,Z,%D1) CR_TAB
851 + AS2 (subi,%A2,lo8(%o0+3)) CR_TAB
852 + AS2 (sbci,%B2,hi8(%o0+3)))
853 + : (AS2 (std,%A0,%A1) CR_TAB
854 AS2 (std,%B0,%B1) CR_TAB
855 AS2 (std,%C0,%C1) CR_TAB
857 @@ -2831,7 +3315,16 @@ out_movqi_mr_r (rtx insn, rtx op[], int
858 fatal_insn ("incorrect insn:",insn);
860 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
861 - return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
862 + return *l = 3, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-63))) CR_TAB
863 + AS2 (sbci,r29,hi8(-(%o0-63))) CR_TAB
864 + AS2 (subi,r28,lo8(-63)) CR_TAB
865 + AS2 (sbci,r29,hi8(-63)) CR_TAB
866 + AS2 (st,Y,%1) CR_TAB
867 + AS2 (subi,r28,lo8(63)) CR_TAB
868 + AS2 (sbci,r29,hi8(63)) CR_TAB
869 + AS2 (subi,r28,lo8(%o0-63)) CR_TAB
870 + AS2 (sbci,r29,hi8(%o0-63)))
871 + : (AS2 (adiw,r28,%o0-63) CR_TAB
872 AS2 (std,Y+63,%1) CR_TAB
873 AS2 (sbiw,r28,%o0-63));
875 @@ -2846,11 +3339,21 @@ out_movqi_mr_r (rtx insn, rtx op[], int
876 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
878 if (reg_unused_after (insn, XEXP (x,0)))
879 - return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
880 + return *l = 3, AVR_TINY ? (AS2 (mov,__tmp_reg__,%1) CR_TAB
881 + AS2 (subi,r26,lo8(-(%o0))) CR_TAB
882 + AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
883 + AS2 (st,X,__tmp_reg__))
884 + : (AS2 (mov,__tmp_reg__,%1) CR_TAB
885 AS2 (adiw,r26,%o0) CR_TAB
886 AS2 (st,X,__tmp_reg__));
888 - return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
889 + return *l = 4, AVR_TINY ? (AS2 (mov,__tmp_reg__,%1) CR_TAB
890 + AS2 (subi,r26,lo8(-(%o0))) CR_TAB
891 + AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
892 + AS2 (st,X,__tmp_reg__) CR_TAB
893 + AS2 (subi,r26,lo8(%o0)) CR_TAB
894 + AS2 (sbci,r27,hi8(%o0)))
895 + : (AS2 (mov,__tmp_reg__,%1) CR_TAB
896 AS2 (adiw,r26,%o0) CR_TAB
897 AS2 (st,X,__tmp_reg__) CR_TAB
899 @@ -2858,16 +3361,38 @@ out_movqi_mr_r (rtx insn, rtx op[], int
902 if (reg_unused_after (insn, XEXP (x,0)))
903 - return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
904 + return *l = 2, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
905 + AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
907 + : (AS2 (adiw,r26,%o0) CR_TAB
910 - return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
911 + return *l = 3, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
912 + AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
913 + AS2 (st,X,%1) CR_TAB
914 + AS2 (subi,r26,lo8(%o0)) CR_TAB
915 + AS2 (sbci,r27,hi8(%o0)))
916 + : (AS2 (adiw,r26,%o0) CR_TAB
922 - return AS2 (std,%0,%1);
923 + op[2] = XEXP(x, 0);
924 + if(REGNO(op[2]) == REG_Y)
925 + return AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
926 + AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
927 + AS2 (st,Y,%1) CR_TAB
928 + AS2 (subi,%A2,lo8(%o0)) CR_TAB
929 + AS2 (sbci,%B2,hi8(%o0)))
931 + if(REGNO(op[2]) == REG_Z)
932 + return AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
933 + AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
934 + AS2 (st,Z,%1) CR_TAB
935 + AS2 (subi,%A2,lo8(%o0)) CR_TAB
936 + AS2 (sbci,%B2,hi8(%o0)))
940 return AS2 (st,%0,%1);
941 @@ -2916,20 +3441,39 @@ out_movhi_mr_r (rtx insn, rtx op[], int
943 /* "st X+,r26" and "st -X,r26" are undefined. */
944 if (!mem_volatile_p && reg_unused_after (insn, src))
945 - return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
946 + return *l=4, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
947 + AS2 (st,X,r26) CR_TAB
948 + AS2 (subi,r26,lo8(-1)) CR_TAB
949 + AS2 (sbci,r27,hi8(-1)) CR_TAB
950 + AS2 (st,X,__tmp_reg__))
951 + : (AS2 (mov,__tmp_reg__,r27) CR_TAB
952 AS2 (st,X,r26) CR_TAB
953 AS2 (adiw,r26,1) CR_TAB
954 AS2 (st,X,__tmp_reg__));
958 - return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
959 + return *l=5, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
960 + AS2 (subi,r26,lo8(-1)) CR_TAB
961 + AS2 (sbci,r27,hi8(-1)) CR_TAB
962 + AS2 (st,X,__tmp_reg__) CR_TAB
963 + AS2 (subi,r26,lo8(1)) CR_TAB
964 + AS2 (sbci,r27,hi8(1)) CR_TAB
966 + : (AS2 (mov,__tmp_reg__,r27) CR_TAB
967 AS2 (adiw,r26,1) CR_TAB
968 AS2 (st,X,__tmp_reg__) CR_TAB
969 AS2 (sbiw,r26,1) CR_TAB
972 - return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
973 + return *l=5, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
974 + AS2 (st,X,r26) CR_TAB
975 + AS2 (subi,r26,lo8(-1)) CR_TAB
976 + AS2 (sbci,r27,hi8(-1)) CR_TAB
977 + AS2 (st,X,__tmp_reg__) CR_TAB
978 + AS2 (subi,r26,lo8(1)) CR_TAB
979 + AS2 (sbci,r27,hi8(1)))
980 + : (AS2 (mov,__tmp_reg__,r27) CR_TAB
981 AS2 (st,X,r26) CR_TAB
982 AS2 (adiw,r26,1) CR_TAB
983 AS2 (st,X,__tmp_reg__) CR_TAB
984 @@ -2944,11 +3488,19 @@ out_movhi_mr_r (rtx insn, rtx op[], int
988 - return *l=3, (AS2 (adiw,r26,1) CR_TAB
989 + return *l=3, AVR_TINY ? (AS2 (subi,r26,lo8(-1)) CR_TAB
990 + AS2 (sbci,r27,hi8(-1)) CR_TAB
991 + AS2 (st,X,%B1) CR_TAB
993 + : (AS2 (adiw,r26,1) CR_TAB
994 AS2 (st,X,%B1) CR_TAB
997 - return *l=3, (AS2 (st,X+,%A1) CR_TAB
998 + return *l=3, AVR_TINY ? (AS2 (st,X+,%A1) CR_TAB
999 + AS2 (st,X,%B1) CR_TAB
1000 + AS2 (subi,r26,lo8(1)) CR_TAB
1001 + AS2 (sbci,r27,hi8(1)))
1002 + : (AS2 (st,X+,%A1) CR_TAB
1003 AS2 (st,X,%B1) CR_TAB
1006 @@ -2957,13 +3509,41 @@ out_movhi_mr_r (rtx insn, rtx op[], int
1010 - return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
1012 + if(reg_base == REG_Y)
1013 + return *l=2, AVR_TINY ? (AS2 (subi,r28,lo8(-1)) CR_TAB
1014 + AS2 (sbci,r29,hi8(-1)) CR_TAB
1015 + AS2 (st,Y,%B1) CR_TAB
1017 + : (AS2 (std,%0+1,%B1) CR_TAB
1019 + if(reg_base == REG_Z)
1020 + return *l=2, AVR_TINY ? (AS2 (subi,r30,lo8(-1)) CR_TAB
1021 + AS2 (sbci,r31,hi8(-1)) CR_TAB
1022 + AS2 (st,Z,%B1) CR_TAB
1024 + : (AS2 (std,%0+1,%B1) CR_TAB
1028 - return *l=2, (AS2 (st,%0,%A1) CR_TAB
1030 + if(reg_base == REG_Y)
1031 + return *l=2, AVR_TINY ? (AS2 (st,Y+,%A1) CR_TAB
1032 + AS2 (st,Y,%B1) CR_TAB
1033 + AS2 (subi,r28,lo8(1)) CR_TAB
1034 + AS2 (sbci,r29,hi8(1)))
1035 + : (AS2 (st,%0,%A1) CR_TAB
1036 + AS2 (std,%0+1,%B1));
1037 + if(reg_base == REG_Z)
1038 + return *l=2, AVR_TINY ? (AS2 (st,Z+,%A1) CR_TAB
1039 + AS2 (st,Z,%B1) CR_TAB
1040 + AS2 (subi,r30,lo8(1)) CR_TAB
1041 + AS2 (sbci,r31,hi8(1)))
1042 + : (AS2 (st,%0,%A1) CR_TAB
1043 AS2 (std,%0+1,%B1));
1047 else if (GET_CODE (base) == PLUS)
1049 int disp = INTVAL (XEXP (base, 1));
1050 @@ -2976,12 +3556,30 @@ out_movhi_mr_r (rtx insn, rtx op[], int
1053 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
1054 - return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
1055 + return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-62))) CR_TAB
1056 + AS2 (sbci,r29,hi8(-(%o0-62))) CR_TAB
1057 + AS2 (subi,r28,lo8(-63)) CR_TAB
1058 + AS2 (sbci,r29,hi8(-63)) CR_TAB
1059 + AS2 (st,Y,%B1) CR_TAB
1060 + AS2 (st,-Y,%A1) CR_TAB
1061 + AS2 (subi,r28,lo8(62)) CR_TAB
1062 + AS2 (sbci,r29,hi8(62)) CR_TAB
1063 + AS2 (subi,r28,lo8(%o0-62)) CR_TAB
1064 + AS2 (sbci,r29,hi8(%o0-62)))
1065 + : (AS2 (adiw,r28,%o0-62) CR_TAB
1066 AS2 (std,Y+63,%B1) CR_TAB
1067 AS2 (std,Y+62,%A1) CR_TAB
1068 AS2 (sbiw,r28,%o0-62));
1070 - return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
1071 + return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB
1072 + AS2 (sbci,r29,hi8(-%o0)) CR_TAB
1073 + AS2 (subi,r28,lo8(-1)) CR_TAB
1074 + AS2 (sbci,r29,hi8(-1)) CR_TAB
1075 + AS2 (st,Y,%B1) CR_TAB
1076 + AS2 (st,-Y,%A1) CR_TAB
1077 + AS2 (subi,r28,lo8(%o0)) CR_TAB
1078 + AS2 (sbci,r29,hi8(%o0)))
1079 + : (AS2 (subi,r28,lo8(-%o0)) CR_TAB
1080 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
1081 AS2 (std,Y+1,%B1) CR_TAB
1082 AS2 (st,Y,%A1) CR_TAB
1083 @@ -2991,12 +3589,30 @@ out_movhi_mr_r (rtx insn, rtx op[], int
1086 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
1087 - return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
1088 + return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-62))) CR_TAB
1089 + AS2 (sbci,r29,hi8(-(%o0-62))) CR_TAB
1090 + AS2 (subi,r28,lo8(-62)) CR_TAB
1091 + AS2 (sbci,r29,hi8(-62)) CR_TAB
1092 + AS2 (st,Y+,%A1) CR_TAB
1093 + AS2 (st,Y,%B1) CR_TAB
1094 + AS2 (subi,r28,lo8(63)) CR_TAB
1095 + AS2 (sbci,r29,hi8(63)) CR_TAB
1096 + AS2 (subi,r28,lo8(%o0-62)) CR_TAB
1097 + AS2 (sbci,r29,hi8(%o0-62)))
1098 + : (AS2 (adiw,r28,%o0-62) CR_TAB
1099 AS2 (std,Y+62,%A1) CR_TAB
1100 AS2 (std,Y+63,%B1) CR_TAB
1101 AS2 (sbiw,r28,%o0-62));
1103 - return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
1104 + return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB
1105 + AS2 (sbci,r29,hi8(-%o0)) CR_TAB
1106 + AS2 (st,Y+,%A1) CR_TAB
1107 + AS2 (st,Y,%B1) CR_TAB
1108 + AS2 (subi,r28,lo8(1)) CR_TAB
1109 + AS2 (sbci,r29,hi8(1)) CR_TAB
1110 + AS2 (subi,r28,lo8(%o0)) CR_TAB
1111 + AS2 (sbci,r29,hi8(%o0)))
1112 + : (AS2 (subi,r28,lo8(-%o0)) CR_TAB
1113 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
1114 AS2 (st,Y,%A1) CR_TAB
1115 AS2 (std,Y+1,%B1) CR_TAB
1116 @@ -3012,7 +3628,16 @@ out_movhi_mr_r (rtx insn, rtx op[], int
1120 - return (AS2 (mov,__tmp_reg__,r26) CR_TAB
1121 + return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB
1122 + AS2 (mov,__zero_reg__,r27) CR_TAB
1123 + AS2 (subi,r26,lo8(-(%o0+1))) CR_TAB
1124 + AS2 (sbci,r27,hi8(-(%o0+1))) CR_TAB
1125 + AS2 (st,X,__zero_reg__) CR_TAB
1126 + AS2 (st,-X,__tmp_reg__) CR_TAB
1127 + AS1 (clr,__zero_reg__) CR_TAB
1128 + AS2 (subi,r26,lo8(%o0)) CR_TAB
1129 + AS2 (sbci,r27,hi8(%o0)))
1130 + : (AS2 (mov,__tmp_reg__,r26) CR_TAB
1131 AS2 (mov,__zero_reg__,r27) CR_TAB
1132 AS2 (adiw,r26,%o0+1) CR_TAB
1133 AS2 (st,X,__zero_reg__) CR_TAB
1134 @@ -3023,19 +3648,35 @@ out_movhi_mr_r (rtx insn, rtx op[], int
1138 - return (AS2 (mov,__tmp_reg__,r26) CR_TAB
1139 + return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB
1140 AS2 (mov,__zero_reg__,r27) CR_TAB
1141 - AS2 (adiw,r26,%o0) CR_TAB
1142 + AS2 (subi,r26,lo8(-(%o0))) CR_TAB
1143 + AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
1144 AS2 (st,X+,__tmp_reg__) CR_TAB
1145 AS2 (st,X,__zero_reg__) CR_TAB
1146 AS1 (clr,__zero_reg__) CR_TAB
1147 - AS2 (sbiw,r26,%o0+1));
1148 + AS2 (subi,r26,lo8(%o0+1)) CR_TAB
1149 + AS2 (sbci,r27,hi8(%o0+1)))
1150 + : (AS2 (mov,__tmp_reg__,r26) CR_TAB
1151 + AS2 (mov,__zero_reg__,r27) CR_TAB
1152 + AS2 (adiw,r26,%o0+1) CR_TAB
1153 + AS2 (st,X+,__tmp_reg__) CR_TAB
1154 + AS2 (st,X,__zero_reg__) CR_TAB
1155 + AS1 (clr,__zero_reg__) CR_TAB
1156 + AS2 (sbiw,r26,%o0));
1163 - return (AS2 (adiw,r26,%o0+1) CR_TAB
1164 + return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0+1))) CR_TAB
1165 + AS2 (sbci,r27,hi8(-(%o0+1))) CR_TAB
1166 + AS2 (st,X,%B1) CR_TAB
1167 + AS2 (st,-X,%A1) CR_TAB
1168 + AS2 (subi,r26,lo8(%o0)) CR_TAB
1169 + AS2 (sbci,r27,hi8(%o0)))
1170 + : (AS2 (adiw,r26,%o0+1) CR_TAB
1171 AS2 (st,X,%B1) CR_TAB
1172 AS2 (st,-X,%A1) CR_TAB
1173 AS2 (sbiw,r26,%o0));
1174 @@ -3043,7 +3684,13 @@ out_movhi_mr_r (rtx insn, rtx op[], int
1178 - return (AS2 (adiw,r26,%o0) CR_TAB
1179 + return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
1180 + AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
1181 + AS2 (st,X+,%A1) CR_TAB
1182 + AS2 (st,X,%B1) CR_TAB
1183 + AS2 (subi,r26,lo8(%o0)) CR_TAB
1184 + AS2 (sbci,r27,hi8(%o0)))
1185 + : (AS2 (adiw,r26,%o0) CR_TAB
1186 AS2 (st,X+,%A1) CR_TAB
1187 AS2 (st,X,%B1) CR_TAB
1188 AS2 (sbiw,r26,%o0+1));
1189 @@ -3051,11 +3698,49 @@ out_movhi_mr_r (rtx insn, rtx op[], int
1193 - return *l=2, (AS2 (std,%B0,%B1) CR_TAB
1195 + op[2] = XEXP(base, 0);
1196 + if(REGNO(op[2]) == REG_Y)
1197 + return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0+2))) CR_TAB
1198 + AS2 (sbci,%B2,hi8(-(%o0+2))) CR_TAB
1199 + AS2 (st,-Y,%B1) CR_TAB
1200 + AS2 (st,-Y,%A1) CR_TAB
1201 + AS2 (subi,%A2,lo8(%o0)) CR_TAB
1202 + AS2 (sbci,%B2,hi8(%o0)))
1203 + : (AS2 (std,%B0,%B1) CR_TAB
1205 + if(REGNO(op[2]) == REG_Z)
1206 + return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0+1))) CR_TAB
1207 + AS2 (sbci,%B2,hi8(-(%o0+1))) CR_TAB
1208 + AS2 (st,-Z,%B1) CR_TAB
1209 + AS2 (st,-Z,%A1) CR_TAB
1210 + AS2 (subi,%A2,lo8(%o0)) CR_TAB
1211 + AS2 (sbci,%B2,hi8(%o0)))
1212 + : (AS2 (std,%B0,%B1) CR_TAB
1213 + AS2 (std,%A0,%A1));
1216 - return *l=2, (AS2 (std,%A0,%A1) CR_TAB
1218 + op[2] = XEXP(base, 0);
1219 + if(REGNO(op[2]) == REG_Y)
1220 + return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
1221 + AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
1222 + AS2 (st,Y+,%A1) CR_TAB
1223 + AS2 (st,Y,%B1) CR_TAB
1224 + AS2 (subi,%A2,lo8(%o0+1)) CR_TAB
1225 + AS2 (sbci,%B2,hi8(%o0+1)))
1226 + : (AS2 (std,%A0,%A1) CR_TAB
1228 + if(REGNO(op[2]) == REG_Z)
1229 + return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
1230 + AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
1231 + AS2 (st,Z+,%A1) CR_TAB
1232 + AS2 (st,Z,%B1) CR_TAB
1233 + AS2 (subi,%A2,lo8(%o0+1)) CR_TAB
1234 + AS2 (sbci,%B2,hi8(%o0+1)))
1235 + : (AS2 (std,%A0,%A1) CR_TAB
1236 + AS2 (std,%B0,%B1));
1239 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1241 @@ -3075,15 +3760,30 @@ out_movhi_mr_r (rtx insn, rtx op[], int
1242 if (REGNO (XEXP (base, 0)) == REG_X)
1245 - return (AS2 (adiw,r26,1) CR_TAB
1246 + return AVR_TINY ? (AS2 (subi,r26,lo8(-1)) CR_TAB
1247 + AS2 (sbci,r27,hi8(-1)) CR_TAB
1248 + AS2 (st,X,%B1) CR_TAB
1249 + AS2 (st,-X,%A1) CR_TAB
1250 + AS2 (subi,r26,lo8(-2)) CR_TAB
1251 + AS2 (sbci,r27,hi8(-2)))
1252 + : (AS2 (adiw,r26,1) CR_TAB
1253 AS2 (st,X,%B1) CR_TAB
1254 AS2 (st,-X,%A1) CR_TAB
1259 + //FIXME:check the code once again for AVR_TINY
1261 - return (AS2 (std,%p0+1,%B1) CR_TAB
1262 + return AVR_TINY ? (AS2 (subi,%A0,lo8(-1)) CR_TAB
1263 + AS2 (sbci,%B0,hi8(-1)) CR_TAB
1264 + AS2 (st,%p0,%B1) CR_TAB
1265 + AS2 (subi,%A0,lo8(1)) CR_TAB
1266 + AS2 (sbci,%B0,hi8(1)) CR_TAB
1267 + AS2 (st,%p0,%A1) CR_TAB
1268 + AS2 (subi,%A0,lo8(-3)) CR_TAB
1269 + AS2 (sbci,%B0,hi8(-3)))
1270 + : (AS2 (std,%p0+1,%B1) CR_TAB
1271 AS2 (st,%p0,%A1) CR_TAB
1274 @@ -3173,7 +3873,9 @@ out_tsthi (rtx insn, rtx op, int *l)
1275 if (test_hard_reg_class (ADDW_REGS, op))
1278 - return AS2 (sbiw,%0,0);
1279 + return AVR_TINY ? (AS2 (subi,%A0,lo8(0)) CR_TAB
1280 + AS2 (sbci,%B0,hi8(0)))
1281 + : AS2 (sbiw,%0,0);
1284 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
1285 @@ -3194,7 +3896,11 @@ out_tstsi (rtx insn, rtx op, int *l)
1286 if (test_hard_reg_class (ADDW_REGS, op))
1289 - return (AS2 (sbiw,%A0,0) CR_TAB
1290 + return AVR_TINY ? (AS2 (subi,%A0,lo8(-(-0))) CR_TAB
1291 + AS2 (sbci,%B0,hi8(-(-0))) CR_TAB
1292 + AS2 (cpc,%C0,__zero_reg__) CR_TAB
1293 + AS2 (cpc,%D0,__zero_reg__))
1294 + : (AS2 (sbiw,%A0,0) CR_TAB
1295 AS2 (cpc,%C0,__zero_reg__) CR_TAB
1296 AS2 (cpc,%D0,__zero_reg__));
1298 @@ -5571,10 +6277,12 @@ avr_file_start (void)
1299 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
1300 fputs ("__SREG__ = 0x3f\n"
1302 - "__SP_L__ = 0x3d\n"
1303 - "__CCP__ = 0x34\n", asm_out_file);
1304 + "__SP_L__ = 0x3d\n", asm_out_file);
1306 + AVR_TINY ? fputs ("__CCP__ = 0x3c\n", asm_out_file) : fputs ("__CCP__ = 0x34\n", asm_out_file);
1308 - fputs ("__tmp_reg__ = 0\n"
1309 + AVR_TINY ? fputs ("__tmp_reg__ = 16\n"
1310 + "__zero_reg__ = 17\n", asm_out_file) : fputs ("__tmp_reg__ = 0\n"
1311 "__zero_reg__ = 1\n", asm_out_file);
1313 /* FIXME: output these only if there is anything in the .data / .bss
1314 diff -Naurp gcc/config/avr/avr-c.c gcc/config/avr/avr-c.c
1315 --- gcc/config/avr/avr-c.c 2011-10-27 17:00:24.000000000 +0530
1316 +++ gcc/config/avr/avr-c.c 2011-10-27 17:47:15.000000000 +0530
1317 @@ -94,5 +94,9 @@ avr_cpu_cpp_builtins (struct cpp_reader
1318 cpp_define (pfile, "__AVR_HAVE_RAMPD__");
1321 + if (avr_current_arch->avrtiny)
1323 + cpp_define (pfile, "__AVR_TINY__");
1327 diff -Naurp gcc/config/avr/avr-devices.c gcc/config/avr/avr-devices.c
1328 --- gcc/config/avr/avr-devices.c 2011-10-27 17:00:24.000000000 +0530
1329 +++ gcc/config/avr/avr-devices.c 2011-10-27 17:47:15.000000000 +0530
1331 /* List of all known AVR MCU architectures. */
1333 const struct base_arch_s avr_arch_types[] = {
1334 - { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, NULL, "avr2" }, /* unknown device specified */
1335 - { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=1", "avr1" },
1336 - { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=2", "avr2" },
1337 - { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=25", "avr25" },
1338 - { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=3", "avr3" },
1339 - { 0, 0, 1, 0, 1, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=31", "avr31" },
1340 - { 0, 0, 1, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=35", "avr35" },
1341 - { 0, 1, 0, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=4", "avr4" },
1342 - { 0, 1, 1, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=5", "avr5" },
1343 - { 0, 1, 1, 1, 1, 1, 0, 0, 0, 0x0060, "__AVR_ARCH__=51", "avr51" },
1344 - { 0, 1, 1, 1, 1, 1, 1, 0, 0, 0x0060, "__AVR_ARCH__=6", "avr6" },
1345 - { 0, 1, 0, 1, 0, 0, 0, 1, 0, 0x2000, "__AVR_ARCH__=101", "avrxmega1" },
1346 - { 0, 1, 1, 1, 0, 0, 0, 1, 0, 0x2000, "__AVR_ARCH__=102", "avrxmega2" },
1347 - { 0, 1, 1, 1, 0, 0, 0, 1, 1, 0x2000, "__AVR_ARCH__=103", "avrxmega3" },
1348 - { 0, 1, 1, 1, 1, 1, 0, 1, 0, 0x2000, "__AVR_ARCH__=104", "avrxmega4" },
1349 - { 0, 1, 1, 1, 1, 1, 0, 1, 1, 0x2000, "__AVR_ARCH__=105", "avrxmega5" },
1350 - { 0, 1, 1, 1, 1, 1, 1, 1, 0, 0x2000, "__AVR_ARCH__=106", "avrxmega6" },
1351 - { 0, 1, 1, 1, 1, 1, 1, 1, 1, 0x2000, "__AVR_ARCH__=107", "avrxmega7" }
1352 + { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, NULL, "avr2" }, /* unknown device specified */
1353 + { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=1", "avr1" },
1354 + { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=2", "avr2" },
1355 + { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=25", "avr25" },
1356 + { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=3", "avr3" },
1357 + { 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=31", "avr31" },
1358 + { 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=35", "avr35" },
1359 + { 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=4", "avr4" },
1360 + { 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=5", "avr5" },
1361 + { 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=51", "avr51" },
1362 + { 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0x0060, "__AVR_ARCH__=6", "avr6" },
1363 + { 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0x2000, "__AVR_ARCH__=101", "avrxmega1" },
1364 + { 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0x2000, "__AVR_ARCH__=102", "avrxmega2" },
1365 + { 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0x2000, "__AVR_ARCH__=103", "avrxmega3" },
1366 + { 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0x2000, "__AVR_ARCH__=104", "avrxmega4" },
1367 + { 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0x2000, "__AVR_ARCH__=105", "avrxmega5" },
1368 + { 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0x2000, "__AVR_ARCH__=106", "avrxmega6" },
1369 + { 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0x2000, "__AVR_ARCH__=107", "avrxmega7" },
1370 + { 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0x0040, "__AVR_ARCH__=201", "avrtiny10" }
1373 /* List of all known AVR MCU types - if updated, it has to be kept
1374 @@ -255,6 +256,14 @@ const struct mcu_type_s avr_mcu_types[]
1375 { "avrxmega7", ARCH_AVRXMEGA7, NULL, 0, 0x2000, "x128a1" },
1376 { "atxmega128a1", ARCH_AVRXMEGA7, "__AVR_ATxmega128A1__", 0, 0x2000, "x128a1" },
1377 { "atxmega128a1u", ARCH_AVRXMEGA7, "__AVR_ATxmega128A1U__", 0, 0x2000, "x128a1u" },
1378 + /* tiny10 family */
1379 + { "avrtiny10", ARCH_AVRTINY10, NULL, 0, 0x0040, "tn10" },
1380 + { "attiny4", ARCH_AVRTINY10, "__AVR_ATtiny4__", 0, 0x0040, "tn4" },
1381 + { "attiny5", ARCH_AVRTINY10, "__AVR_ATtiny5__", 0, 0x0040, "tn5" },
1382 + { "attiny9", ARCH_AVRTINY10, "__AVR_ATtiny9__", 0, 0x0040, "tn9" },
1383 + { "attiny10", ARCH_AVRTINY10, "__AVR_ATtiny10__", 0, 0x0040, "tn10" },
1384 + { "attiny20", ARCH_AVRTINY10, "__AVR_ATtiny20__", 0, 0x0040, "tn20" },
1385 + { "attiny40", ARCH_AVRTINY10, "__AVR_ATtiny40__", 0, 0x0040, "tn40" },
1386 /* Assembler only. */
1387 { "avr1", ARCH_AVR1, NULL, 0, 0x0060, "s1200" },
1388 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__", 0, 0x0060, "s1200" },
1389 diff -Naurp gcc/config/avr/avr.h gcc/config/avr/avr.h
1390 --- gcc/config/avr/avr.h 2011-10-27 17:00:24.000000000 +0530
1391 +++ gcc/config/avr/avr.h 2011-10-27 17:47:15.000000000 +0530
1392 @@ -51,6 +51,9 @@ struct base_arch_s {
1393 /* Core have RAMPX, RAMPY and RAMPD registers. */
1396 + /* Core is in avrtiny10 family. */
1399 /* Default start of data section address for architecture. */
1400 int default_data_section_start;
1402 @@ -82,7 +85,8 @@ enum avr_arch
1412 @@ -126,6 +130,7 @@ extern GTY(()) section *progmem_section;
1413 #define AVR_HAVE_EIJMP_EICALL (avr_current_arch->have_eijmp_eicall)
1414 #define AVR_HAVE_8BIT_SP (avr_current_device->short_sp || TARGET_TINY_STACK)
1415 #define AVR_XMEGA (avr_current_arch->xmega)
1416 +#define AVR_TINY (avr_current_arch->avrtiny)
1417 #define AVR_HAVE_RAMPX_Y_D (avr_current_arch->have_rampx_y_d)
1419 #define AVR_2_BYTE_PC (!AVR_HAVE_EIJMP_EICALL)
1420 @@ -245,7 +250,6 @@ extern GTY(()) section *progmem_section;
1422 #define ADJUST_REG_ALLOC_ORDER order_regs_for_local_alloc ()
1425 #define HARD_REGNO_NREGS(REGNO, MODE) ((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1427 #define HARD_REGNO_MODE_OK(REGNO, MODE) avr_hard_regno_mode_ok(REGNO, MODE)
1428 @@ -309,6 +313,41 @@ enum reg_class {
1429 {0xffffffff,0x00000003} /* ALL_REGS */ \
1432 +/* Zero or more C statements that may conditionally modify five variables
1433 + fixed_regs, call_used_regs, global_regs, reg_names, and reg_class_contents,
1434 + to take into account any dependence of these register sets on target flags.
1435 + The first three of these are of type char [] (interpreted as Boolean
1436 + vectors). global_regs is a const char *[], and reg_class_contents is a
1437 + HARD_REG_SET. Before the macro is called, fixed_regs, call_used_regs,
1438 + reg_class_contents, and reg_names have been initialized from
1439 + FIXED_REGISTERS, CALL_USED_REGISTERS, REG_CLASS_CONTENTS, and
1440 + REGISTER_NAMES, respectively. global_regs has been cleared, and any
1441 + '-ffixed-reg', '-fcall-used-reg' and '-fcall-saved-reg' command options
1442 + have been applied.
1444 + You need not define this macro if it has no work to do.
1446 + If the usage of an entire class of registers depends on the target flags,
1447 + you may indicate this to GCC by using this macro to modify fixed_regs and
1448 + call_used_regs to 1 for each of the registers in the classes which should
1449 + not be used by GCC. Also define the macro REG_CLASS_FROM_LETTER /
1450 + REG_CLASS_FROM_CONSTRAINT to return NO_REGS if it is called with a letter
1451 + for a class that shouldn't be used. (However, if this class is not included
1452 + in GENERAL_REGS and all of the insn patterns whose constraints permit this
1453 + class are controlled by target switches, then GCC will automatically avoid
1454 + using these registers when the target switches are opposed to them.) */
1456 +#define CONDITIONAL_REGISTER_USAGE \
1459 + for (i = 0; i <= 17; i++) { \
1460 + fixed_regs[i] = 1; \
1461 + call_used_regs[i] = 1; \
1463 + CLEAR_HARD_REG_SET(reg_class_contents[(int)ADDW_REGS]); \
1464 + CLEAR_HARD_REG_SET(reg_class_contents[(int)NO_LD_REGS]); \
1467 #define REGNO_REG_CLASS(R) avr_regno_reg_class(R)
1469 /* The following macro defines cover classes for Integrated Register
1470 diff -Naurp gcc/config/avr/avr.md gcc/config/avr/avr.md
1471 --- gcc/config/avr/avr.md 2011-10-27 17:00:24.000000000 +0530
1472 +++ gcc/config/avr/avr.md 2011-10-27 17:47:15.000000000 +0530
1474 [(set_attr "length" "8")
1475 (set_attr "cc" "none")])
1478 + [(TMP_REGNO_AVRTINY10 16) ; temporary register r16
1479 + (ZERO_REGNO_AVRTINY10 17)]) ; zero register r17
1481 (define_insn "*mov<ALLS:mode>"
1482 [(set (match_operand:ALLS 0 "nonimmediate_operand" "=r,r,r,Qm,!d,r")
1484 rtx addr1 = copy_to_mode_reg (Pmode, XEXP (operands[1], 0));
1486 /* Create rtx for tmp register - we use this as scratch. */
1487 - rtx tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
1488 + rtx tmp_reg_rtx = gen_rtx_REG (QImode, AVR_TINY ? TMP_REGNO_AVRTINY10 : TMP_REGNO);
1490 if (GET_CODE (operands[2]) != CONST_INT)
1492 @@ -2907,7 +2910,7 @@
1494 (use (label_ref (match_operand 1 "" "")))
1495 (clobber (match_dup 0))]
1496 - "AVR_HAVE_JMP_CALL && !AVR_HAVE_EIJMP_EICALL"
1497 + "(AVR_HAVE_JMP_CALL && !AVR_HAVE_EIJMP_EICALL)"
1501 diff -Naurp gcc/config/avr/libgcc-fixed.S gcc/config/avr/libgcc-fixed.S
1502 --- gcc/config/avr/libgcc-fixed.S 2011-10-27 17:00:03.000000000 +0530
1503 +++ gcc/config/avr/libgcc-fixed.S 2011-10-27 17:47:15.000000000 +0530
1504 @@ -29,13 +29,17 @@ Boston, MA 02110-1301, USA. */
1506 /* Fixed point library routines for avr. */
1508 +#if defined (__AVR_TINY__)
1509 +#define __zero_reg__ r17
1510 +#define __tmp_reg__ r16
1512 #define __zero_reg__ r1
1513 #define __tmp_reg__ r0
1515 #define __SREG__ 0x3f
1516 #define __SP_H__ 0x3e
1517 #define __SP_L__ 0x3d
1518 #define __RAMPZ__ 0x3B
1520 /* Conversions to float. */
1521 #if defined (L_fractqqsf)
1523 @@ -281,15 +285,15 @@ __muluqq3_exit:
1526 fmuls r_arg1H, r_arg2H
1528 + movw r_resL, __tmp_reg__
1529 fmulsu r_arg2H, r_arg1L
1533 + add r_resL, __zero_reg__
1535 fmulsu r_arg1H, r_arg2L
1538 + add r_resL, __zero_reg__
1542 @@ -301,13 +305,13 @@ __mulhq3:
1545 mul r_arg1H, r_arg2H
1547 + movw r_resL, __tmp_reg__
1548 mul r_arg1H, r_arg2L
1550 + add r_resL, __zero_reg__
1552 adc r_resH, __zero_reg__
1553 mul r_arg1L, r_arg2H
1555 + add r_resL, __zero_reg__
1557 adc r_resH, __zero_reg__
1559 @@ -401,15 +405,15 @@ __muluhq3_skip:
1562 mul r_arg1L, r_arg2L
1564 + mov r_resL, __zero_reg__
1565 muls r_arg1H, r_arg2H
1567 + mov r_resH, __tmp_reg__
1568 mulsu r_arg1H, r_arg2L
1571 + add r_resL, __tmp_reg__
1572 + adc r_resH, __zero_reg__
1573 mulsu r_arg2H, r_arg1L
1576 + add r_resL, __tmp_reg__
1577 + adc r_resH, __zero_reg__
1581 @@ -420,15 +424,15 @@ __mulha3:
1584 mul r_arg1L, r_arg2L
1586 + mov r_resL, __zero_reg__
1587 mul r_arg1H, r_arg2H
1589 + mov r_resH, __tmp_reg__
1590 mul r_arg1H, r_arg2L
1593 + add r_resL, __tmp_reg__
1594 + adc r_resH, __zero_reg__
1595 mul r_arg1L, r_arg2H
1598 + add r_resL, __tmp_reg__
1599 + adc r_resH, __zero_reg__
1603 @@ -442,8 +446,8 @@ __muluha3:
1604 #define r_arg2H r23 /* multiplicand High */
1605 #define r_resL r18 /* result Low */
1606 #define r_resH r19 /* result High */
1607 -#define r_scratchL r0 /* scratch Low */
1608 -#define r_scratchH r1
1609 +#define r_scratchL __tmp_reg__ /* scratch Low */
1610 +#define r_scratchH __zero_reg__
1612 #if defined (L_mulha3)
1614 @@ -480,8 +484,8 @@ __mulha3_exit:
1616 clr r_resL ; clear result
1618 - mov_l r0, r_arg1L ; save multiplicand
1620 + mov_l __tmp_reg__, r_arg1L ; save multiplicand
1621 + mov_h __zero_reg__, r_arg1H
1624 rjmp __muluha3_skip1
1625 @@ -490,7 +494,12 @@ __muluha3_loop1:
1627 lsl r_arg1L ; shift multiplicand
1629 +#if defined (__AVR_TINY__)
1630 + subi r_arg1L, lo8(0)
1631 + sbci r_arg1L, hi8(0)
1635 breq __muluha3_loop1_done ; exit multiplicand = 0
1637 brne __muluha3_loop1 ; exit multiplier = 0
1638 @@ -500,7 +509,12 @@ __muluha3_loop1_done:
1640 lsr r_arg1H ; shift multiplicand
1642 +#if defined (__AVR_TINY__)
1643 + subi r_arg1L, lo8(0)
1644 + sbci r_arg1L, hi8(0)
1648 breq __muluha3_exit ; exit if multiplicand = 0
1650 rjmp __muluha3_skip2
1651 @@ -556,53 +570,53 @@ __mulsa3:
1654 mul r_arg1H, r_arg2L
1656 + mov r_resL, __zero_reg__
1657 mul r_arg1L, r_arg2H
1659 + add r_resL, __zero_reg__
1661 mul r_arg1L, r_arg2HL
1664 + add r_resL, __tmp_reg__
1665 + adc r_resH, __zero_reg__
1667 mul r_arg1H, r_arg2H
1670 + add r_resL, __tmp_reg__
1671 + adc r_resH, __zero_reg__
1673 mul r_arg1HL, r_arg2L
1676 + add r_resL, __tmp_reg__
1677 + adc r_resH, __zero_reg__
1679 mulsu r_arg2HH, r_arg1L
1683 + add r_resH, __tmp_reg__
1684 + adc r_resHL, __zero_reg__
1686 mul r_arg1H, r_arg2HL
1689 + add r_resH, __tmp_reg__
1690 + adc r_resHL, __zero_reg__
1692 mul r_arg1HL, r_arg2H
1695 + add r_resH, __tmp_reg__
1696 + adc r_resHL, __zero_reg__
1698 mulsu r_arg1HH, r_arg2L
1702 + add r_resH, __tmp_reg__
1703 + adc r_resHL, __zero_reg__
1705 mulsu r_arg2HH, r_arg1H
1708 + add r_resHL, __tmp_reg__
1709 + adc r_resHH, __zero_reg__
1710 mul r_arg1HL, r_arg2HL
1713 + add r_resHL, __tmp_reg__
1714 + adc r_resHH, __zero_reg__
1715 mulsu r_arg1HH, r_arg2H
1718 + add r_resHL, __tmp_reg__
1719 + adc r_resHH, __zero_reg__
1720 mulsu r_arg2HH, r_arg1HL
1722 + add r_resHH, __tmp_reg__
1723 mulsu r_arg1HH, r_arg2HL
1725 + add r_resHH, __tmp_reg__
1729 @@ -617,51 +631,51 @@ __mulusa3:
1732 mul r_arg1H, r_arg2L
1734 + mov r_resL, __zero_reg__
1735 mul r_arg1L, r_arg2H
1737 + add r_resL, __zero_reg__
1739 mul r_arg1L, r_arg2HL
1742 + add r_resL, __tmp_reg__
1743 + adc r_resH, __zero_reg__
1745 mul r_arg1H, r_arg2H
1748 + add r_resL, __tmp_reg__
1749 + adc r_resH, __zero_reg__
1751 mul r_arg1HL, r_arg2L
1754 + add r_resL, __tmp_reg__
1755 + adc r_resH, __zero_reg__
1757 mul r_arg1L, r_arg2HH
1760 + add r_resH, __tmp_reg__
1761 + adc r_resHL, __zero_reg__
1763 mul r_arg1H, r_arg2HL
1766 + add r_resH, __tmp_reg__
1767 + adc r_resHL, __zero_reg__
1769 mul r_arg1HL, r_arg2H
1772 + add r_resH, __tmp_reg__
1773 + adc r_resHL, __zero_reg__
1775 mul r_arg1HH, r_arg2L
1778 + add r_resH, __tmp_reg__
1779 + adc r_resHL, __zero_reg__
1781 mul r_arg1H, r_arg2HH
1784 + add r_resHL, __tmp_reg__
1785 + adc r_resHH, __zero_reg__
1786 mul r_arg1HL, r_arg2HL
1789 + add r_resHL, __tmp_reg__
1790 + adc r_resHH, __zero_reg__
1791 mul r_arg1HH, r_arg2H
1794 + add r_resHL, __tmp_reg__
1795 + adc r_resHH, __zero_reg__
1796 mul r_arg1HL, r_arg2HH
1798 + add r_resHH, __tmp_reg__
1799 mul r_arg1HH, r_arg2HL
1801 + add r_resHH, __tmp_reg__
1805 @@ -680,13 +694,20 @@ __mulusa3:
1806 #define r_arg2HL r26
1807 #define r_arg2HH r27 /* multiplicand High */
1809 +#if defined (__AVR_TINY__)
1810 +#define r_resL r28 /* result Low */
1812 +#define r_resHL r30
1813 +#define r_resHH r31 /* result High */
1815 #define r_resL r14 /* result Low */
1818 #define r_resHH r17 /* result High */
1821 -#define r_scratchL r0 /* scratch Low */
1822 -#define r_scratchH r1
1823 +#define r_scratchL __tmp_reg__ /* scratch Low */
1824 +#define r_scratchH __zero_reg__
1825 #define r_scratchHL r22
1826 #define r_scratchHH r23 /* scratch High */
1828 @@ -758,7 +779,12 @@ __mulusa3_skip1:
1832 +#if defined (__AVR_TINY__)
1833 + subi r_arg2HL, lo8(0)
1834 + sbci r_arg2HL, hi8(0)
1838 brne __mulusa3_loop1 ; exit multiplier = 0
1839 __mulusa3_loop1_done:
1840 mov_l r_arg1L, r_scratchL ; restore multiplicand
1841 @@ -779,7 +805,12 @@ __mulusa3_loop2:
1845 +#if defined (__AVR_TINY__)
1846 + subi r_arg2L, lo8(0)
1847 + sbci r_arg2L, hi8(0)
1851 brne __mulusa3_loop2 ; exit if multiplier = 0
1853 clr __zero_reg__ ; got clobbered
1854 @@ -791,9 +822,7 @@ __mulusa3_exit:
1864 @@ -821,8 +850,8 @@ __mulusa3_exit:
1870 + mov __tmp_reg__, r_divd
1871 + eor __tmp_reg__, r_div
1875 @@ -831,7 +860,7 @@ __divqq3:
1876 breq __divqq3_minus1 ; if equal return -1
1879 - sbrc r0, 7 ; negate result if needed
1880 + sbrc __tmp_reg__, 7 ; negate result if needed
1884 @@ -886,8 +915,8 @@ __udivuqq3_cont:
1890 + mov __tmp_reg__, r_divdH
1891 + eor __tmp_reg__, r_divH
1893 rjmp __divhq3_divpos
1895 @@ -906,7 +935,7 @@ __divhq3_divdpos:
1899 - sbrs r0, 7 ; negate result if needed
1900 + sbrs __tmp_reg__, 7 ; negate result if needed
1904 @@ -958,8 +987,8 @@ __udivuhq3_cont:
1910 + mov __tmp_reg__, r_divdH
1911 + eor __tmp_reg__, r_divH
1913 rjmp __divha3_divpos
1915 @@ -973,7 +1002,7 @@ __divha3_divpos:
1919 - sbrs r0, 7 ; negate result if needed
1920 + sbrs __tmp_reg__, 7 ; negate result if needed
1924 @@ -1027,8 +1056,8 @@ __udivuha3:
1930 + mov __tmp_reg__, r27
1931 + eor __tmp_reg__, r_divHH
1933 rjmp __divsa3_divpos
1935 @@ -1050,7 +1079,7 @@ __divsa3_divpos:
1939 - sbrs r0, 7 ; negate result if needed
1940 + sbrs __tmp_reg__, 7 ; negate result if needed
1944 diff -Naurp gcc/config/avr/libgcc.S gcc/config/avr/libgcc.S
1945 --- gcc/config/avr/libgcc.S 2011-10-27 17:00:24.000000000 +0530
1946 +++ gcc/config/avr/libgcc.S 2011-10-27 17:47:15.000000000 +0530
1947 @@ -22,8 +22,13 @@ a copy of the GCC Runtime Library Except
1948 see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
1949 <http://www.gnu.org/licenses/>. */
1951 +#if defined (__AVR_TINY__)
1952 +#define __zero_reg__ r17
1953 +#define __tmp_reg__ r16
1955 #define __zero_reg__ r1
1956 #define __tmp_reg__ r0
1958 #define __SREG__ 0x3f
1959 #define __SP_H__ 0x3e
1960 #define __SP_L__ 0x3d
1961 @@ -141,7 +146,12 @@ __mulhi3_skip1:
1963 lsr r_arg1H ; gets LSB of multiplier
1965 +#if defined (__AVR_TINY__)
1966 + subi r_arg1L, lo8(0)
1967 + sbci r_arg1L, hi8(0)
1971 brne __mulhi3_loop ; exit if multiplier = 0
1973 mov r_arg1H,r_resH ; result to return register
1974 @@ -305,7 +315,12 @@ __mulsi3_skip1:
1978 +#if defined (__AVR_TINY__)
1979 + subi r_arg1HL, lo8(0)
1980 + sbci r_arg1HL, hi8(0)
1985 brne __mulsi3_loop ; exit if multiplier = 0
1987 @@ -611,6 +626,7 @@ __divmodsi4_neg1:
1988 /**********************************
1989 * This is a prologue subroutine
1990 **********************************/
1991 +#if !defined (__AVR_TINY__)
1992 #if defined (L_prologue)
1994 .global __prologue_saves__
1995 @@ -664,7 +680,6 @@ __prologue_saves__:
1996 * This is an epilogue subroutine
1998 #if defined (L_epilogue)
2000 .global __epilogue_restores__
2001 .func __epilogue_restores__
2002 __epilogue_restores__:
2003 @@ -705,6 +720,7 @@ __epilogue_restores__:
2006 #endif /* defined (L_epilogue) */
2007 +#endif /* !defined (__AVR_TINY__) */
2010 .section .fini9,"ax",@progbits
2011 @@ -731,6 +747,7 @@ _cleanup:
2013 #endif /* defined (L_cleanup) */
2015 +#if !defined(__AVR_TINY__)
2017 .global __tablejump2__
2018 .func __tablejump2__
2019 @@ -763,7 +780,9 @@ __tablejump__:
2022 #endif /* defined (L_tablejump) */
2025 +#if !defined(__AVR_TINY__)
2027 .section .init4,"ax",@progbits
2028 .global __do_copy_data
2029 @@ -825,6 +844,7 @@ __do_copy_data:
2030 brne .L__do_copy_data_loop
2031 #endif /* !defined(__AVR_HAVE_ELPMX__) && !defined(__AVR_HAVE_ELPM__) */
2032 #endif /* L_copy_data */
2035 /* __do_clear_bss is only necessary if there is anything in .bss section. */
2037 @@ -864,7 +884,12 @@ __do_global_ctors:
2038 ldi r16, hh8(__ctors_end)
2039 rjmp .L__do_global_ctors_start
2040 .L__do_global_ctors_loop:
2041 + #if defined (__AVR_TINY__)
2047 sbc r16, __zero_reg__
2050 @@ -883,7 +908,12 @@ __do_global_ctors:
2051 ldi r29, hi8(__ctors_end)
2052 rjmp .L__do_global_ctors_start
2053 .L__do_global_ctors_loop:
2054 +#if defined (__AVR_TINY__)
2063 @@ -905,7 +935,12 @@ __do_global_dtors:
2064 ldi r16, hh8(__dtors_start)
2065 rjmp .L__do_global_dtors_start
2066 .L__do_global_dtors_loop:
2067 + #if defined (__AVR_TINY__)
2073 sbc r16, __zero_reg__
2076 @@ -927,7 +962,12 @@ __do_global_dtors:
2080 +#if defined (__AVR_TINY__)
2086 .L__do_global_dtors_start:
2087 cpi r28, lo8(__dtors_end)
2089 @@ -935,6 +975,7 @@ __do_global_dtors:
2090 #endif /* defined(__AVR_HAVE_RAMPZ__) */
2091 #endif /* L_dtors */
2093 +#if !defined (__AVR_TINY__)
2094 #ifdef L_tablejump_elpm
2095 .global __tablejump_elpm__
2096 .func __tablejump_elpm__
2097 @@ -965,5 +1006,6 @@ __tablejump_elpm__:
2098 #endif /* defined (__AVR_HAVE_ELPM__) */
2100 #endif /* defined (L_tablejump_elpm) */
2101 +#endif /* !defined (__AVR_TINY__) */
2103 #include "libgcc-fixed.S"
2104 diff -Naurp gcc/config/avr/t-avr gcc/config/avr/t-avr
2105 --- gcc/config/avr/t-avr 2011-10-27 17:00:24.000000000 +0530
2106 +++ gcc/config/avr/t-avr 2011-10-27 17:47:15.000000000 +0530
2107 @@ -107,8 +107,8 @@ fp-bit.c: $(srcdir)/config/fp-bit.c $(sr
2111 -MULTILIB_OPTIONS = mmcu=avr2/mmcu=avr25/mmcu=avr3/mmcu=avr31/mmcu=avr35/mmcu=avr4/mmcu=avr5/mmcu=avr51/mmcu=avr6/mmcu=avrxmega2/mmcu=avrxmega4/mmcu=avrxmega5/mmcu=avrxmega6/mmcu=avrxmega7
2112 -MULTILIB_DIRNAMES = avr2 avr25 avr3 avr31 avr35 avr4 avr5 avr51 avr6 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7
2113 +MULTILIB_OPTIONS = mmcu=avr2/mmcu=avr25/mmcu=avr3/mmcu=avr31/mmcu=avr35/mmcu=avr4/mmcu=avr5/mmcu=avr51/mmcu=avr6/mmcu=avrxmega2/mmcu=avrxmega4/mmcu=avrxmega5/mmcu=avrxmega6/mmcu=avrxmega7/mmcu=avrtiny10
2114 +MULTILIB_DIRNAMES = avr2 avr25 avr3 avr31 avr35 avr4 avr5 avr51 avr6 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7 avrtiny10
2116 # The many avr2 matches are not listed here - this is the default.
2117 MULTILIB_MATCHES = \
2118 @@ -270,7 +270,13 @@ MULTILIB_MATCHES = \
2119 mmcu?avrxmega6=mmcu?atxmega256a3b \
2120 mmcu?avrxmega6=mmcu?atxmega256d3 \
2121 mmcu?avrxmega7=mmcu?atxmega128a1 \
2122 - mmcu?avrxmega7=mmcu?atxmega128a1u
2123 + mmcu?avrxmega7=mmcu?atxmega128a1u \
2124 + mmcu?avrtiny10=mmcu?attiny4 \
2125 + mmcu?avrtiny10=mmcu?attiny5 \
2126 + mmcu?avrtiny10=mmcu?attiny9 \
2127 + mmcu?avrtiny10=mmcu?attiny10 \
2128 + mmcu?avrtiny10=mmcu?attiny20 \
2129 + mmcu?avrtiny10=mmcu?attiny40
2131 MULTILIB_EXCEPTIONS =