[packages/crossavr-gcc.git] / 512-gcc-avrtc542.patch
- synchronized patches with official AVR toolchain 3.4.1.830
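
For orientation, a minimal sketch of the transformation this patch applies throughout avr.c (the function name, templates, and lengths below are invented for illustration and are not part of the patch): comma-expression returns of the form "*l = n, AVR_TINY ? tiny_template : classic_template" are rewritten as explicit if/else blocks so each variant sets its own instruction count before returning its assembler template.

/* Illustrative sketch only -- assumptions: AVR_TINY stands in for the
   real macro from avr.h, and the returned strings stand in for the AS2
   template chains used in the real code.  */
#ifndef AVR_TINY
#define AVR_TINY 0                /* stand-in for the avr.h macro */
#endif

static const char *
example_out_mov (int *l)
{
  if (AVR_TINY)
    {
      *l = 9;                     /* tiny cores lack adiw/sbiw/ldd, so
                                     longer subi/sbci sequences are emitted */
      return "subi/sbci ... ld ... subi/sbci ...";
    }
  else
    {
      *l = 3;                     /* classic cores keep the short sequence */
      return "adiw ... ldd ... sbiw ...";
    }
}

The old code reported a single *l for both arms of the ternary, which under-counted the AVR_TINY sequences; the rewritten form below corrects the length for every branch.
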
1 diff -Naurp gcc/config/avr/avr.c gcc/config/avr/avr.c
2 --- gcc/config/avr/avr.c        2012-07-05 14:23:46.000000000 +0530
3 +++ gcc/config/avr/avr.c        2012-07-06 17:30:38.000000000 +0530
4 @@ -2191,19 +2191,28 @@ out_movqi_r_mr (rtx insn, rtx op[], int 
5             fatal_insn ("incorrect insn:",insn);
6  
7           if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
8 -           return *l = 3, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-63))) CR_TAB 
9 -                                         AS2 (sbci,r29,hi8(-(%o1-63))) CR_TAB
10 -                                                 AS2 (subi,r28,lo8(-63))       CR_TAB 
11 -                                         AS2 (sbci,r29,hi8(-63))       CR_TAB
12 -                                     AS2 (ld,%0,Y)                 CR_TAB
13 -                                                 AS2 (subi,r28,lo8(63))       CR_TAB 
14 -                                         AS2 (sbci,r29,hi8(63))       CR_TAB
15 -                                                 AS2 (subi,r28,lo8(%o1-63)) CR_TAB 
16 -                                         AS2 (sbci,r29,hi8(%o1-63)))  
17 -                                  : (AS2 (adiw,r28,%o1-63) CR_TAB
18 -                           AS2 (ldd,%0,Y+63)     CR_TAB
19 -                           AS2 (sbiw,r28,%o1-63));
20 -
21 +         {
22 +            if (AVR_TINY)
23 +            {
24 +              *l = 9;
25 +              return (AS2 (subi,r28,lo8(-(%o1-63))) CR_TAB
26 +                      AS2 (sbci,r29,hi8(-(%o1-63))) CR_TAB
27 +                      AS2 (subi,r28,lo8(-63))       CR_TAB
28 +                      AS2 (sbci,r29,hi8(-63))       CR_TAB
29 +                      AS2 (ld,%0,Y)                 CR_TAB
30 +                      AS2 (subi,r28,lo8(63))        CR_TAB
31 +                      AS2 (sbci,r29,hi8(63))        CR_TAB
32 +                      AS2 (subi,r28,lo8(%o1-63))    CR_TAB
33 +                      AS2 (sbci,r29,hi8(%o1-63)));
34 +            }
35 +            else
36 +            {
37 +              *l = 3;
38 +              return (AS2 (adiw,r28,%o1-63) CR_TAB
39 +                      AS2 (ldd,%0,Y+63)     CR_TAB
40 +                      AS2 (sbiw,r28,%o1-63));
41 +            }
42 +         }
43  
44           return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
45                           AS2 (sbci,r29,hi8(-%o1)) CR_TAB
46 @@ -2217,38 +2226,75 @@ out_movqi_r_mr (rtx insn, rtx op[], int 
47              it but I have this situation with extremal optimizing options.  */
48           if (reg_overlap_mentioned_p (dest, XEXP (x,0))
49               || reg_unused_after (insn, XEXP (x,0)))
50 -           return *l = 2, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
51 -                                                 AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
52 -                                                 AS2 (ld,%0,X))
53 -                                                  : (AS2 (adiw,r26,%o1) CR_TAB
54 -                           AS2 (ld,%0,X));
55 -
56 -           return *l = 3, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
57 -                                                 AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
58 -                                                 AS2 (ld,%0,X) CR_TAB
59 -                                                 AS2 (subi,r26,lo8(%o1)) CR_TAB 
60 -                                         AS2 (sbci,r27,hi8(%o1))) 
61 -                                                  : (AS2 (adiw,r26,%o1) CR_TAB
62 -                         AS2 (ld,%0,X)      CR_TAB
63 -                         AS2 (sbiw,r26,%o1));
64 +          {
65 +            if (AVR_TINY)
66 +            {
67 +              *l = 3;
68 +              return (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
69 +                      AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
70 +                      AS2 (ld,%0,X));
71 +            }
72 +            else
73 +            {
74 +              *l = 2;
75 +              return (AS2 (adiw,r26,%o1) CR_TAB
76 +                      AS2 (ld,%0,X));
77 +            }
78 +          }
79 +
80 +          if (AVR_TINY)
81 +          {
82 +            *l = 5;
83 +            return (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
84 +                    AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
85 +                    AS2 (ld,%0,X) CR_TAB
86 +                    AS2 (subi,r26,lo8(%o1)) CR_TAB 
87 +                    AS2 (sbci,r27,hi8(%o1)));
88 +          }
89 +          else
90 +          {
91 +            *l = 3;
92 +            return (AS2 (adiw,r26,%o1) CR_TAB
93 +                    AS2 (ld,%0,X)      CR_TAB
94 +                    AS2 (sbiw,r26,%o1));
95 +          }
96         }
97  
98 -      *l = 1;
99           op[2] = XEXP(x, 0);
100           if(REGNO(op[2]) == REG_Y)
101 -                         return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
102 -                                               AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
103 -                                               AS2 (ld,%0,Y)              CR_TAB
104 -                                   AS2 (subi,%A2,lo8(%o1)) CR_TAB
105 -                                               AS2 (sbci,%B2,hi8(%o1)))
106 -                                                :   AS2 (ldd,%0,%1);
107 +          {
108 +            if (AVR_TINY)
109 +            {
110 +              *l = 5;
111 +              return (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
112 +                      AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
113 +                      AS2 (ld,%0,Y)              CR_TAB
114 +                      AS2 (subi,%A2,lo8(%o1)) CR_TAB
115 +                      AS2 (sbci,%B2,hi8(%o1)));
116 +            }
117 +            else
118 +            {
119 +              *l = 1;
120 +              return (AS2 (ldd,%0,%1));
121 +            }
122 +          }
123           if(REGNO(op[2]) == REG_Z)
124 -                         return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
125 -                                               AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
126 -                                               AS2 (ld,%0,Z)              CR_TAB
127 -                                   AS2 (subi,%A2,lo8(%o1)) CR_TAB
128 -                                               AS2 (sbci,%B2,hi8(%o1)))
129 -                                                :   AS2 (ldd,%0,%1);
130 +          {
131 +            if (AVR_TINY)
132 +            {
133 +              *l = 5;
134 +              return (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
135 +                      AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
136 +                      AS2 (ld,%0,Z)              CR_TAB
137 +                      AS2 (subi,%A2,lo8(%o1)) CR_TAB
138 +                      AS2 (sbci,%B2,hi8(%o1)));
139 +            }
140 +            else
141 +            {
142 +              *l = 1;
143 +              return (AS2 (ldd,%0,%1));
144 +            }
145 +          }
146      }
147    *l = 1;
148    return AS2 (ld,%0,%1);
149 @@ -2287,36 +2333,57 @@ out_movhi_r_mr (rtx insn, rtx op[], int 
150               return (AS2 (ld,%A0,X+) CR_TAB
151                       AS2 (ld,%B0,X));
152             }
153 -         *l  = 3;
154 -         return AVR_TINY ? (AS2 (ld,%A0,X+) CR_TAB
155 -                               AS2 (ld,%B0,X)  CR_TAB
156 -                               AS2 (subi,r26,lo8(1))          CR_TAB
157 -                               AS2 (sbci,r27,hi8(1))) 
158 -                        : (AS2 (ld,%A0,X+) CR_TAB
159 -                 AS2 (ld,%B0,X) CR_TAB
160 -                 AS2 (sbiw,r26,1));
161 +
162 +          if (AVR_TINY)
163 +          {
164 +            *l = 4;
165 +            return (AS2 (ld,%A0,X+)       CR_TAB
166 +                    AS2 (ld,%B0,X)        CR_TAB
167 +                    AS2 (subi,r26,lo8(1)) CR_TAB
168 +                    AS2 (sbci,r27,hi8(1)));
169 +          }
170 +          else
171 +          {
172 +            *l = 3;
173 +            return (AS2 (ld,%A0,X+) CR_TAB
174 +                    AS2 (ld,%B0,X)  CR_TAB
175 +                    AS2 (sbiw,r26,1));
176 +          }
177          }
178        else                      /* (R)  */
179         {
180 -         *l = 2;
181           if(reg_base == REG_Y)
182 -         return AVR_TINY ? (AS2 (ld,%A0,%1) CR_TAB
183 -                               AS2 (subi,r28,lo8((-1))) CR_TAB
184 -                               AS2 (sbci,r29,hi8((-1))) CR_TAB 
185 -                       AS2 (ld,%B0,%1) CR_TAB
186 -                               AS2 (subi,r28,lo8(1)) CR_TAB
187 -                               AS2 (sbci,r29,hi8(1)))             
188 -                        : (AS2 (ld,%A0,%1) CR_TAB
189 -                               AS2 (ldd,%B0,%1+1));
190 +          {
191 +            if (AVR_TINY) {
192 +              *l = 6;
193 +              return (AS2 (ld,%A0,%1) CR_TAB
194 +                      AS2 (subi,r28,lo8((-1))) CR_TAB
195 +                      AS2 (sbci,r29,hi8((-1))) CR_TAB 
196 +                      AS2 (ld,%B0,%1) CR_TAB
197 +                      AS2 (subi,r28,lo8(1)) CR_TAB
198 +                      AS2 (sbci,r29,hi8(1)));
199 +            } else {
200 +              *l = 2;
201 +              return (AS2 (ld,%A0,%1) CR_TAB
202 +                      AS2 (ldd,%B0,%1+1));
203 +            }
204 +          }
205           if(reg_base == REG_Z)
206 -         return AVR_TINY ? (AS2 (ld,%A0,%1) CR_TAB
207 -                               AS2 (subi,r30,lo8((-1))) CR_TAB
208 -                               AS2 (sbci,r31,hi8((-1))) CR_TAB 
209 -                       AS2 (ld,%B0,%1) CR_TAB
210 -                               AS2 (subi,r30,lo8(1)) CR_TAB
211 -                               AS2 (sbci,r31,hi8(1)))
212 -                        : (AS2 (ld,%A0,%1) CR_TAB
213 -                 AS2 (ldd,%B0,%1+1));
214 +          {
215 +            if (AVR_TINY) {
216 +              *l = 6;
217 +              return (AS2 (ld,%A0,%1) CR_TAB
218 +                      AS2 (subi,r30,lo8((-1))) CR_TAB
219 +                      AS2 (sbci,r31,hi8((-1))) CR_TAB 
220 +                      AS2 (ld,%B0,%1) CR_TAB
221 +                      AS2 (subi,r30,lo8(1)) CR_TAB
222 +                      AS2 (sbci,r31,hi8(1)));
223 +            } else {
224 +              *l = 2;
225 +              return (AS2 (ld,%A0,%1) CR_TAB
226 +                      AS2 (ldd,%B0,%1+1));
227 +            }
228 +          }
229         }
230      }
231    else if (GET_CODE (base) == PLUS) /* (R + i) */
232 @@ -2330,35 +2397,47 @@ out_movhi_r_mr (rtx insn, rtx op[], int 
233             fatal_insn ("incorrect insn:",insn);
234           
235           if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
236 -           return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-62))) CR_TAB 
237 -                                         AS2 (sbci,r29,hi8(-(%o1-62))) CR_TAB
238 -                                                 AS2 (subi,r28,lo8(-62))       CR_TAB 
239 -                                         AS2 (sbci,r29,hi8(-62))       CR_TAB
240 -                                     AS2 (ld,%A0,Y+)                CR_TAB
241 -                                     AS2 (ld,%B0,Y)                CR_TAB
242 -                                                 AS2 (subi,r28,lo8(63))       CR_TAB 
243 -                                         AS2 (sbci,r29,hi8(63))       CR_TAB
244 -                                                 AS2 (subi,r28,lo8(%o1-62)) CR_TAB 
245 -                                         AS2 (sbci,r29,hi8(%o1-62)))  
246 -                                      : (AS2 (adiw,r28,%o1-62) CR_TAB
247 -                           AS2 (ldd,%A0,Y+62)    CR_TAB
248 -                           AS2 (ldd,%B0,Y+63)    CR_TAB
249 -                           AS2 (sbiw,r28,%o1-62));
250 -
251 -         return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o1)) CR_TAB
252 -                                               AS2 (sbci,r29,hi8(-%o1)) CR_TAB
253 -                                               AS2 (ld,%A0,Y+)          CR_TAB
254 -                                               AS2 (ld,%B0,Y)           CR_TAB
255 -                                               AS2 (subi,r28,lo8(1))   CR_TAB
256 -                                               AS2 (sbci,r29,hi8(1))   CR_TAB
257 -                                       AS2 (subi,r28,lo8(%o1))  CR_TAB
258 -                                               AS2 (sbci,r29,hi8(%o1)))
259 -                                                : (AS2 (subi,r28,lo8(-%o1)) CR_TAB
260 -                         AS2 (sbci,r29,hi8(-%o1)) CR_TAB
261 -                         AS2 (ld,%A0,Y)           CR_TAB
262 -                         AS2 (ldd,%B0,Y+1)        CR_TAB
263 -                         AS2 (subi,r28,lo8(%o1))  CR_TAB
264 -                         AS2 (sbci,r29,hi8(%o1)));
265 +          {
266 +            if (AVR_TINY) {
267 +              *l = 10;
268 +              return (AS2 (subi,r28,lo8(-(%o1-62))) CR_TAB 
269 +                      AS2 (sbci,r29,hi8(-(%o1-62))) CR_TAB
270 +                      AS2 (subi,r28,lo8(-62))       CR_TAB 
271 +                      AS2 (sbci,r29,hi8(-62))       CR_TAB
272 +                      AS2 (ld,%A0,Y+)               CR_TAB
273 +                      AS2 (ld,%B0,Y)                CR_TAB
274 +                      AS2 (subi,r28,lo8(63))        CR_TAB 
275 +                      AS2 (sbci,r29,hi8(63))        CR_TAB
276 +                      AS2 (subi,r28,lo8(%o1-62))    CR_TAB 
277 +                      AS2 (sbci,r29,hi8(%o1-62)));
278 +            } else {
279 +              *l = 4;
280 +              return (AS2 (adiw,r28,%o1-62) CR_TAB
281 +                      AS2 (ldd,%A0,Y+62)    CR_TAB
282 +                      AS2 (ldd,%B0,Y+63)    CR_TAB
283 +                      AS2 (sbiw,r28,%o1-62));
284 +            }
285 +          }
286 +
287 +         if (AVR_TINY) {
288 +            *l = 8;
289 +            return (AS2 (subi,r28,lo8(-%o1)) CR_TAB
290 +                    AS2 (sbci,r29,hi8(-%o1)) CR_TAB
291 +                    AS2 (ld,%A0,Y+)          CR_TAB
292 +                    AS2 (ld,%B0,Y)           CR_TAB
293 +                    AS2 (subi,r28,lo8(1))    CR_TAB
294 +                    AS2 (sbci,r29,hi8(1))    CR_TAB
295 +                    AS2 (subi,r28,lo8(%o1))  CR_TAB
296 +                    AS2 (sbci,r29,hi8(%o1)));
297 +          } else {
298 +            *l = 6;
299 +            return (AS2 (subi,r28,lo8(-%o1)) CR_TAB
300 +                    AS2 (sbci,r29,hi8(-%o1)) CR_TAB
301 +                    AS2 (ld,%A0,Y)           CR_TAB
302 +                    AS2 (ldd,%B0,Y+1)        CR_TAB
303 +                    AS2 (subi,r28,lo8(%o1))  CR_TAB
304 +                    AS2 (sbci,r29,hi8(%o1)));
305 +          }
306         }
307        if (reg_base == REG_X)
308         {
309 @@ -2366,80 +2445,114 @@ out_movhi_r_mr (rtx insn, rtx op[], int 
310              it but I have this situation with extremal
311              optimization options.  */
312           
313 -         *l = 4;
314 -         if (reg_base == reg_dest)
315 -           return AVR_TINY ? (AS2 (subi,r26,lo8(-%o1))      CR_TAB
316 -                                 AS2 (sbci,r27,hi8(-%o1))      CR_TAB
317 -                         AS2 (ld,__tmp_reg__,X+)       CR_TAB
318 -                         AS2 (ld,%B0,X)                CR_TAB
319 -                         AS2 (mov,%A0,__tmp_reg__))
320 -                              : (AS2 (adiw,r26,%o1)      CR_TAB
321 -                   AS2 (ld,__tmp_reg__,X+) CR_TAB
322 -                   AS2 (ld,%B0,X)          CR_TAB
323 -                   AS2 (mov,%A0,__tmp_reg__));
324 -
325 -           return AVR_TINY ? (AS2 (subi,r26,lo8(-%o1))      CR_TAB
326 -                         AS2 (sbci,r27,hi8(-%o1))      CR_TAB
327 -                         AS2 (ld,%A0,X+)                            CR_TAB
328 -                         AS2 (ld,%B0,X)                CR_TAB
329 -                                 AS2 (subi,r26,lo8(%o1+1))     CR_TAB 
330 -                                 AS2 (sbci,r27,hi8(%o1+1)))  
331 -                          : (AS2 (adiw,r26,%o1) CR_TAB
332 -                 AS2 (ld,%A0,X+)    CR_TAB
333 -                 AS2 (ld,%B0,X)     CR_TAB
334 -                 AS2 (sbiw,r26,%o1+1));
335 +         if (reg_base == reg_dest) {
336 +            if (AVR_TINY) {
337 +              *l = 5;
338 +              return (AS2 (subi,r26,lo8(-%o1))      CR_TAB
339 +                      AS2 (sbci,r27,hi8(-%o1))      CR_TAB
340 +                      AS2 (ld,__tmp_reg__,X+)       CR_TAB
341 +                      AS2 (ld,%B0,X)                CR_TAB
342 +                      AS2 (mov,%A0,__tmp_reg__));
343 +            } else {
344 +              *l = 4;
345 +              return (AS2 (adiw,r26,%o1)      CR_TAB
346 +                      AS2 (ld,__tmp_reg__,X+) CR_TAB
347 +                      AS2 (ld,%B0,X)          CR_TAB
348 +                      AS2 (mov,%A0,__tmp_reg__));
349 +            }
350 +          }
351 +
352 +          if (AVR_TINY) {
353 +            *l = 6;
354 +            return (AS2 (subi,r26,lo8(-%o1))      CR_TAB
355 +                    AS2 (sbci,r27,hi8(-%o1))      CR_TAB
356 +                    AS2 (ld,%A0,X+)              CR_TAB
357 +                    AS2 (ld,%B0,X)                CR_TAB
358 +                    AS2 (subi,r26,lo8(%o1+1))     CR_TAB 
359 +                    AS2 (sbci,r27,hi8(%o1+1)));
360 +          } else {
361 +            *l = 4;
362 +            return (AS2 (adiw,r26,%o1) CR_TAB
363 +                    AS2 (ld,%A0,X+)    CR_TAB
364 +                    AS2 (ld,%B0,X)     CR_TAB
365 +                    AS2 (sbiw,r26,%o1+1));
366 +          }
367         }
368  
369        if (reg_base == reg_dest)
370         {
371 -         *l = 3;
372           op[2] = XEXP(base, 0);
373  
374 -         if(REGNO(op[2]) == REG_Y)
375 -                         return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
376 -                                               AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
377 -                                               AS2 (ld,__tmp_reg__,Y+)     CR_TAB
378 -                               AS2 (ld,%B0,Y)         CR_TAB
379 -                                               AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
380 -                                               AS2 (sbci,%B2,hi8(%o1+1)) CR_TAB
381 -                                               AS2 (mov,%A0,__tmp_reg__))
382 -                                        :  (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
383 -                               AS2 (ldd,%B0,%B1)         CR_TAB
384 -                               AS2 (mov,%A0,__tmp_reg__));
385 -         if(REGNO(op[2]) == REG_Z)
386 -                         return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
387 -                                               AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
388 -                                               AS2 (ld,__tmp_reg__,Z+)     CR_TAB
389 -                               AS2 (ld,%B0,Z)         CR_TAB
390 -                                               AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
391 -                                               AS2 (sbci,%B2,hi8(%o1+1)) CR_TAB
392 -                                               AS2 (mov,%A0,__tmp_reg__))
393 -                                        :  (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
394 -                 AS2 (ldd,%B0,%B1)         CR_TAB
395 -                 AS2 (mov,%A0,__tmp_reg__));
396 +         if(REGNO(op[2]) == REG_Y) {
397 +            if (AVR_TINY) {
398 +              *l = 7;
399 +              return (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
400 +                      AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
401 +                      AS2 (ld,__tmp_reg__,Y+)    CR_TAB
402 +                      AS2 (ld,%B0,Y)             CR_TAB
403 +                      AS2 (subi,%A2,lo8(%o1+1))  CR_TAB
404 +                      AS2 (sbci,%B2,hi8(%o1+1))  CR_TAB
405 +                      AS2 (mov,%A0,__tmp_reg__));
406 +            } else {
407 +              *l = 3;
408 +              return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
409 +                      AS2 (ldd,%B0,%B1)         CR_TAB
410 +                      AS2 (mov,%A0,__tmp_reg__));
411 +            }
412 +          }
413 +         if(REGNO(op[2]) == REG_Z) {
414 +            if (AVR_TINY) {
415 +              *l = 7;
416 +              return (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
417 +                      AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
418 +                      AS2 (ld,__tmp_reg__,Z+)    CR_TAB
419 +                      AS2 (ld,%B0,Z)             CR_TAB
420 +                      AS2 (subi,%A2,lo8(%o1+1))  CR_TAB
421 +                      AS2 (sbci,%B2,hi8(%o1+1))  CR_TAB
422 +                      AS2 (mov,%A0,__tmp_reg__));
423 +            } else {
424 +              *l = 3;
425 +              return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
426 +                      AS2 (ldd,%B0,%B1)         CR_TAB
427 +                      AS2 (mov,%A0,__tmp_reg__));
428 +            }
429 +          }
430         }
431 -      *l = 2;
432  
433           op[2] = XEXP(base, 0);
434  
435           if(REGNO(op[2]) == REG_Y)
436 -                         return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
437 -                                               AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
438 -                                               AS2 (ld,%A0,Y+)             CR_TAB
439 -                               AS2 (ld,%B0,Y)               CR_TAB
440 -                                               AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
441 -                                               AS2 (sbci,%B2,hi8(%o1+1)))
442 -                                        :  (AS2 (ldd,%A0,%A1) CR_TAB
443 -                               AS2 (ldd,%B0,%B1));
444 +          {
445 +            if (AVR_TINY) {
446 +              *l = 6;
447 +              return (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
448 +                      AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
449 +                      AS2 (ld,%A0,Y+)            CR_TAB
450 +                      AS2 (ld,%B0,Y)             CR_TAB
451 +                      AS2 (subi,%A2,lo8(%o1+1))  CR_TAB
452 +                      AS2 (sbci,%B2,hi8(%o1+1)));
453 +            } else {
454 +              *l = 2;
455 +              return (AS2 (ldd,%A0,%A1) CR_TAB
456 +                      AS2 (ldd,%B0,%B1));
457 +            }
458 +          }
459           if(REGNO(op[2]) == REG_Z)
460 -                         return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
461 -                                               AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
462 -                                               AS2 (ld,%A0,Z+)             CR_TAB
463 -                               AS2 (ld,%B0,Z)              CR_TAB
464 -                                               AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
465 -                                               AS2 (sbci,%B2,hi8(%o1+1)))
466 -                                        :  (AS2 (ldd,%A0,%A1) CR_TAB
467 -             AS2 (ldd,%B0,%B1));
468 +          {
469 +            if (AVR_TINY) {
470 +              *l = 6;
471 +              return (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
472 +                      AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
473 +                      AS2 (ld,%A0,Z+)            CR_TAB
474 +                      AS2 (ld,%B0,Z)             CR_TAB
475 +                      AS2 (subi,%A2,lo8(%o1+1))  CR_TAB
476 +                      AS2 (sbci,%B2,hi8(%o1+1)));
477 +            } else {
478 +              *l = 2;
479 +              return (AS2 (ldd,%A0,%A1) CR_TAB
480 +                      AS2 (ldd,%B0,%B1));
481 +            }
482 +          }
483        
484      }
485    else if (GET_CODE (base) == PRE_DEC) /* (--R) */
486 @@ -2450,35 +2563,43 @@ out_movhi_r_mr (rtx insn, rtx op[], int 
487        if (mem_volatile_p)
488          {
489            if (REGNO (XEXP (base, 0)) == REG_X)
490 -            {
491 +          {
492 +            if (AVR_TINY) {
493 +              *l = 6;
494 +              return (AS2 (subi,r26,lo8(2))  CR_TAB
495 +                      AS2 (sbci,r27,hi8(2))  CR_TAB
496 +                      AS2 (ld,%A0,X+)        CR_TAB
497 +                      AS2 (ld,%B0,X)         CR_TAB
498 +                      AS2 (subi,r26,lo8(1))  CR_TAB
499 +                      AS2 (sbci,r27,hi8(1)));
500 +            } else {
501                *l = 4;
502 -              return AVR_TINY ? (AS2 (subi,r26,lo8(2))  CR_TAB
503 -                                           AS2 (sbci,r27,hi8(2))   CR_TAB
504 -                        AS2 (ld,%A0,X+)            CR_TAB
505 -                        AS2 (ld,%B0,X)             CR_TAB
506 -                                               AS2 (subi,r26,lo8(1))  CR_TAB
507 -                                               AS2 (sbci,r27,hi8(1)))
508 -                                        : (AS2 (sbiw,r26,2)  CR_TAB
509 +              return (AS2 (sbiw,r26,2)  CR_TAB
510                        AS2 (ld,%A0,X+)   CR_TAB
511                        AS2 (ld,%B0,X)    CR_TAB
512                        AS2 (sbiw,r26,1));
513              }
514 +          }
515            else
516 -            {
517 +          {
518 +            //FIXME:check the code once again for AVR_TINY
519 +            if (AVR_TINY) {
520 +              *l = 8;
521 +              return (AS2 (subi,%A1,lo8(3))  CR_TAB
522 +                      AS2 (sbci,%B1,hi8(3))  CR_TAB
523 +                      AS2 (ld,%A0,%p1)       CR_TAB
524 +                      AS2 (subi,%A1,lo8(-1)) CR_TAB
525 +                      AS2 (sbci,%B1,hi8(-1)) CR_TAB
526 +                      AS2 (ld,%B0,%p1)       CR_TAB
527 +                      AS2 (subi,%A1,lo8(1))  CR_TAB
528 +                      AS2 (sbci,%B1,hi8(1)));
529 +            } else {
530                *l = 3;
531 -                         //FIXME:check the code once again for AVR_TINY
532 -              return AVR_TINY ? (AS2 (subi,%A1,lo8(3))  CR_TAB 
533 -                                           AS2 (sbci,%B1,hi8(3))  CR_TAB
534 -                        AS2 (ld,%A0,%p1)       CR_TAB
535 -                                               AS2 (subi,%A1,lo8(-1)) CR_TAB
536 -                                               AS2 (sbci,%B1,hi8(-1)) CR_TAB
537 -                        AS2 (ld,%B0,%p1)       CR_TAB
538 -                                               AS2 (subi,%A1,lo8(1)) CR_TAB
539 -                                               AS2 (sbci,%B1,hi8(1)))
540 -                                        : (AS2 (sbiw,%r1,2)   CR_TAB
541 -                      AS2 (ld,%A0,%p1)  CR_TAB
542 +              return (AS2 (sbiw,%r1,2)   CR_TAB
543 +                      AS2 (ld,%A0,%p1)   CR_TAB
544                        AS2 (ldd,%B0,%p1+1));
545              }
546 +          }
547          }
548  
549        *l = 2;
550 @@ -2529,24 +2650,30 @@ out_movsi_r_mr (rtx insn, rtx op[], int 
551      {
552        if (reg_base == REG_X)        /* (R26) */
553          {
554 -          if (reg_dest == REG_X)
555 +          if (reg_dest == REG_X) {
556             /* "ld r26,-X" is undefined */
557 -           return *l=7, AVR_TINY ? (AS2 (subi,r26,lo8(-3))  CR_TAB
558 -                                               AS2 (sbci,r27,hi8(-3))  CR_TAB
559 -                                   AS2 (ld,r29,X)          CR_TAB
560 -                                   AS2 (ld,r28,-X)         CR_TAB
561 -                                   AS2 (ld,__tmp_reg__,-X) CR_TAB
562 -                                   AS2 (subi,r26,lo8(1))   CR_TAB
563 -                                   AS2 (sbci,r27,hi8(1))   CR_TAB
564 -                                   AS2 (ld,r26,X)          CR_TAB
565 -                                   AS2 (mov,r27,__tmp_reg__))
566 -                                    : (AS2 (adiw,r26,3)        CR_TAB
567 -                         AS2 (ld,r29,X)          CR_TAB
568 -                         AS2 (ld,r28,-X)         CR_TAB
569 -                         AS2 (ld,__tmp_reg__,-X) CR_TAB
570 -                         AS2 (sbiw,r26,1)        CR_TAB
571 -                         AS2 (ld,r26,X)          CR_TAB
572 -                         AS2 (mov,r27,__tmp_reg__));
573 +            if (AVR_TINY) {
574 +              *l = 9;
575 +              return (AS2 (subi,r26,lo8(-3))  CR_TAB
576 +                      AS2 (sbci,r27,hi8(-3))  CR_TAB
577 +                     AS2 (ld,r29,X)          CR_TAB
578 +                      AS2 (ld,r28,-X)         CR_TAB
579 +                      AS2 (ld,__tmp_reg__,-X) CR_TAB
580 +                      AS2 (subi,r26,lo8(1))   CR_TAB
581 +                      AS2 (sbci,r27,hi8(1))   CR_TAB
582 +                      AS2 (ld,r26,X)          CR_TAB
583 +                      AS2 (mov,r27,__tmp_reg__));
584 +            } else {
585 +              *l = 7;
586 +              return (AS2 (adiw,r26,3)        CR_TAB
587 +                      AS2 (ld,r29,X)          CR_TAB
588 +                      AS2 (ld,r28,-X)         CR_TAB
589 +                      AS2 (ld,__tmp_reg__,-X) CR_TAB
590 +                      AS2 (sbiw,r26,1)        CR_TAB
591 +                      AS2 (ld,r26,X)          CR_TAB
592 +                      AS2 (mov,r27,__tmp_reg__));
593 +            }
594 +          }
595  
596            else if (reg_dest == REG_X - 2)
597              return *l=5, (AS2 (ld,%A0,X+)  CR_TAB
598 @@ -2559,113 +2686,154 @@ out_movsi_r_mr (rtx insn, rtx op[], int 
599                             AS2 (ld,%B0,X+) CR_TAB
600                             AS2 (ld,%C0,X+) CR_TAB
601                             AS2 (ld,%D0,X));
602 -          else
603 -            return  *l=5, AVR_TINY ? (AS2 (ld,%A0,X+) CR_TAB
604 -                             AS2 (ld,%B0,X+) CR_TAB
605 -                             AS2 (ld,%C0,X+) CR_TAB
606 -                             AS2 (ld,%D0,X)  CR_TAB
607 -                             AS2 (subi,r26,lo8(3)) CR_TAB
608 -                             AS2 (sbci,r27,hi8(3)))
609 -                                                 : (AS2 (ld,%A0,X+) CR_TAB
610 -                           AS2 (ld,%B0,X+) CR_TAB
611 -                           AS2 (ld,%C0,X+) CR_TAB
612 -                           AS2 (ld,%D0,X)  CR_TAB
613 -                           AS2 (sbiw,r26,3));
614 +          else {
615 +            if (AVR_TINY) {
616 +              *l = 6;
617 +              return (AS2 (ld,%A0,X+) CR_TAB
618 +                      AS2 (ld,%B0,X+) CR_TAB
619 +                      AS2 (ld,%C0,X+) CR_TAB
620 +                      AS2 (ld,%D0,X)  CR_TAB
621 +                      AS2 (subi,r26,lo8(3)) CR_TAB
622 +                      AS2 (sbci,r27,hi8(3)));
623 +            } else {
624 +              *l = 5;
625 +              return (AS2 (ld,%A0,X+) CR_TAB
626 +                      AS2 (ld,%B0,X+) CR_TAB
627 +                      AS2 (ld,%C0,X+) CR_TAB
628 +                      AS2 (ld,%D0,X)  CR_TAB
629 +                      AS2 (sbiw,r26,3));
630 +            }
631 +          }
632          }
633        else
634          {
635            if (reg_dest == reg_base)
636 -                 {
637 -                         if(reg_base == REG_Y)
638 -            return *l=5, AVR_TINY ? (AS2 (subi,r28,lo8(-3)) CR_TAB
639 -                                                       AS2 (sbci,r29,hi8(-3)) CR_TAB
640 -                                           AS2 (ld,%D0,Y)        CR_TAB
641 -                            AS2 (ld,%C0,-Y)        CR_TAB
642 -                            AS2 (subi,r28,lo8(1)) CR_TAB
643 -                            AS2 (sbci,r29,hi8(1)) CR_TAB
644 -                            AS2 (ld,__tmp_reg__,%1)  CR_TAB
645 -                            AS2 (subi,r28,lo8(1)) CR_TAB
646 -                            AS2 (sbci,r29,hi8(1)) CR_TAB
647 -                            AS2 (ld,%A0,%1)  CR_TAB
648 -                            AS2 (mov,%B0,__tmp_reg__))
649 -                                            : (AS2 (ldd,%D0,%1+3) CR_TAB
650 -                            AS2 (ldd,%C0,%1+2) CR_TAB
651 -                            AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
652 -                            AS2 (ld,%A0,%1)  CR_TAB
653 -                            AS2 (mov,%B0,__tmp_reg__));
654 -                         if(reg_base == REG_Z)
655 -            return *l=5, AVR_TINY ? (AS2 (subi,r30,lo8(-3)) CR_TAB
656 -                                                       AS2 (sbci,r31,hi8(-3)) CR_TAB
657 -                                           AS2 (ld,%D0,Z)        CR_TAB
658 -                            AS2 (ld,%C0,-Z)        CR_TAB
659 -                            AS2 (subi,r30,lo8(1)) CR_TAB
660 -                            AS2 (sbci,r31,hi8(1)) CR_TAB
661 -                            AS2 (ld,__tmp_reg__,%1)  CR_TAB
662 -                            AS2 (subi,r30,lo8(1)) CR_TAB
663 -                            AS2 (sbci,r31,hi8(1)) CR_TAB
664 -                            AS2 (ld,%A0,%1)  CR_TAB
665 -                            AS2 (mov,%B0,__tmp_reg__))
666 -                                            : (AS2 (ldd,%D0,%1+3) CR_TAB
667 -                          AS2 (ldd,%C0,%1+2) CR_TAB
668 -                          AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
669 -                          AS2 (ld,%A0,%1)  CR_TAB
670 -                          AS2 (mov,%B0,__tmp_reg__));
671 -                 }
672 +         {
673 +            if(reg_base == REG_Y) {
674 +              if (AVR_TINY) {
675 +                *l = 11;
676 +                return (AS2 (subi,r28,lo8(-3)) CR_TAB
677 +                        AS2 (sbci,r29,hi8(-3)) CR_TAB
678 +                        AS2 (ld,%D0,Y)         CR_TAB
679 +                        AS2 (ld,%C0,-Y)        CR_TAB
680 +                        AS2 (subi,r28,lo8(1))  CR_TAB
681 +                        AS2 (sbci,r29,hi8(1))  CR_TAB
682 +                        AS2 (ld,__tmp_reg__,%1)  CR_TAB
683 +                        AS2 (subi,r28,lo8(1)) CR_TAB
684 +                        AS2 (sbci,r29,hi8(1)) CR_TAB
685 +                        AS2 (ld,%A0,%1)  CR_TAB
686 +                        AS2 (mov,%B0,__tmp_reg__));
687 +              } else {
688 +                *l = 5;
689 +                return (AS2 (ldd,%D0,%1+3) CR_TAB
690 +                        AS2 (ldd,%C0,%1+2) CR_TAB
691 +                        AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
692 +                        AS2 (ld,%A0,%1)  CR_TAB
693 +                        AS2 (mov,%B0,__tmp_reg__));
694 +              }
695 +            }
696  
697 +            if(reg_base == REG_Z) {
698 +              if (AVR_TINY) {
699 +                *l = 11;
700 +                return (AS2 (subi,r30,lo8(-3)) CR_TAB
701 +                        AS2 (sbci,r31,hi8(-3)) CR_TAB
702 +                        AS2 (ld,%D0,Z)         CR_TAB
703 +                        AS2 (ld,%C0,-Z)        CR_TAB
704 +                        AS2 (subi,r30,lo8(1))  CR_TAB
705 +                        AS2 (sbci,r31,hi8(1))  CR_TAB
706 +                        AS2 (ld,__tmp_reg__,%1)  CR_TAB
707 +                        AS2 (subi,r30,lo8(1)) CR_TAB
708 +                        AS2 (sbci,r31,hi8(1)) CR_TAB
709 +                        AS2 (ld,%A0,%1)  CR_TAB
710 +                        AS2 (mov,%B0,__tmp_reg__));
711 +              } else {
712 +                *l = 5;
713 +                return (AS2 (ldd,%D0,%1+3) CR_TAB
714 +                        AS2 (ldd,%C0,%1+2) CR_TAB
715 +                        AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
716 +                        AS2 (ld,%A0,%1)  CR_TAB
717 +                        AS2 (mov,%B0,__tmp_reg__));
718 +              }
719 +            }
720 +          }
721            else if (reg_base == reg_dest + 2)
722 -                 {
723 -                         if(reg_base == REG_Y)
724 -            return *l=5, AVR_TINY ? (AS2 (ld ,%A0,Y+)       CR_TAB
725 -                            AS2 (ld,%B0,Y+) CR_TAB
726 -                            AS2 (ld,__tmp_reg__,Y+) CR_TAB
727 -                            AS2 (ld,%D0,Y) CR_TAB
728 -                            AS2 (subi,r28,lo8(3)) CR_TAB
729 -                            AS2 (sbci,r29,hi8(3)) CR_TAB
730 -                            AS2 (mov,%C0,__tmp_reg__))
731 -                                            : (AS2 (ld ,%A0,%1)    CR_TAB
732 -                            AS2 (ldd,%B0,%1+1) CR_TAB
733 -                            AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
734 -                            AS2 (ldd,%D0,%1+3) CR_TAB
735 -                            AS2 (mov,%C0,__tmp_reg__));
736 -                         if(reg_base == REG_Z)
737 -            return *l=5, AVR_TINY ? (AS2 (ld ,%A0,Z+)       CR_TAB
738 -                            AS2 (ld,%B0,Z+) CR_TAB
739 -                            AS2 (ld,__tmp_reg__,Z+) CR_TAB
740 -                            AS2 (ld,%D0,Z) CR_TAB
741 -                            AS2 (subi,r30,lo8(3)) CR_TAB
742 -                            AS2 (sbci,r31,hi8(3)) CR_TAB
743 -                            AS2 (mov,%C0,__tmp_reg__))
744 -                                            : (AS2 (ld ,%A0,%1)    CR_TAB
745 -                          AS2 (ldd,%B0,%1+1) CR_TAB
746 -                          AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
747 -                          AS2 (ldd,%D0,%1+3) CR_TAB
748 -                          AS2 (mov,%C0,__tmp_reg__));
749 -                 }
750 -          else
751 -                 {
752 -                         if(reg_base == REG_Y)
753 -            return *l=4, AVR_TINY ? (AS2 (ld ,%A0,Y+)   CR_TAB
754 -                            AS2 (ld,%B0,Y+) CR_TAB
755 -                            AS2 (ld,%C0,Y+) CR_TAB
756 -                            AS2 (ld,%D0,Y)  CR_TAB
757 -                            AS2 (subi,r28,lo8(3)) CR_TAB
758 -                            AS2 (sbci,r29,hi8(3)))
759 -                                                    : (AS2 (ld ,%A0,%1)   CR_TAB
760 -                          AS2 (ldd,%B0,%1+1) CR_TAB
761 -                          AS2 (ldd,%C0,%1+2) CR_TAB
762 -                          AS2 (ldd,%D0,%1+3));
763 -                         if(reg_base == REG_Z)
764 -            return *l=4, AVR_TINY ? (AS2 (ld ,%A0,Z+)   CR_TAB
765 -                            AS2 (ld,%B0,Z+) CR_TAB
766 -                            AS2 (ld,%C0,Z+) CR_TAB
767 -                            AS2 (ld,%D0,Z) CR_TAB
768 -                            AS2 (subi,r30,lo8(3)) CR_TAB
769 -                            AS2 (sbci,r31,hi8(3))) 
770 -                                                    : (AS2 (ld ,%A0,%1)   CR_TAB
771 -                            AS2 (ldd,%B0,%1+1) CR_TAB
772 -                            AS2 (ldd,%C0,%1+2) CR_TAB
773 -                            AS2 (ldd,%D0,%1+3));
774 -        }
775 +          {
776 +            if(reg_base == REG_Y) {
777 +              if (AVR_TINY) {
778 +                *l = 7;
779 +                return (AS2 (ld ,%A0,Y+)       CR_TAB
780 +                        AS2 (ld,%B0,Y+) CR_TAB
781 +                        AS2 (ld,__tmp_reg__,Y+) CR_TAB
782 +                        AS2 (ld,%D0,Y) CR_TAB
783 +                        AS2 (subi,r28,lo8(3)) CR_TAB
784 +                        AS2 (sbci,r29,hi8(3)) CR_TAB
785 +                        AS2 (mov,%C0,__tmp_reg__));
786 +              } else {
787 +                *l = 5;
788 +                return (AS2 (ld ,%A0,%1)    CR_TAB
789 +                        AS2 (ldd,%B0,%1+1) CR_TAB
790 +                        AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
791 +                        AS2 (ldd,%D0,%1+3) CR_TAB
792 +                        AS2 (mov,%C0,__tmp_reg__));
793 +              }
794 +            }
795 +
796 +            if(reg_base == REG_Z) {
797 +              if (AVR_TINY) {
798 +                *l = 7;
799 +                return (AS2 (ld ,%A0,Z+) CR_TAB
800 +                        AS2 (ld,%B0,Z+) CR_TAB
801 +                        AS2 (ld,__tmp_reg__,Z+) CR_TAB
802 +                        AS2 (ld,%D0,Z) CR_TAB
803 +                        AS2 (subi,r30,lo8(3)) CR_TAB
804 +                        AS2 (sbci,r31,hi8(3)) CR_TAB
805 +                        AS2 (mov,%C0,__tmp_reg__));
806 +              } else {
807 +                *l = 5;
808 +                return (AS2 (ld ,%A0,%1)    CR_TAB
809 +                        AS2 (ldd,%B0,%1+1) CR_TAB
810 +                        AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
811 +                        AS2 (ldd,%D0,%1+3) CR_TAB
812 +                        AS2 (mov,%C0,__tmp_reg__));
813 +              }
814 +            }
815 +          } else {
816 +            if(reg_base == REG_Y) {
817 +              if (AVR_TINY) {
818 +                *l = 6;
819 +                return (AS2 (ld ,%A0,Y+) CR_TAB
820 +                        AS2 (ld,%B0,Y+) CR_TAB
821 +                        AS2 (ld,%C0,Y+) CR_TAB
822 +                        AS2 (ld,%D0,Y) CR_TAB
823 +                        AS2 (subi,r28,lo8(3)) CR_TAB
824 +                        AS2 (sbci,r29,hi8(3)));
825 +              } else {
826 +                *l = 4;
827 +                return (AS2 (ld ,%A0,%1)   CR_TAB
828 +                        AS2 (ldd,%B0,%1+1) CR_TAB
829 +                        AS2 (ldd,%C0,%1+2) CR_TAB
830 +                        AS2 (ldd,%D0,%1+3));
831 +              }
832 +            }
833 +            if(reg_base == REG_Z) {
834 +              if (AVR_TINY) {
835 +                *l = 6;
836 +                return (AS2 (ld ,%A0,Z+) CR_TAB
837 +                        AS2 (ld,%B0,Z+) CR_TAB
838 +                        AS2 (ld,%C0,Z+) CR_TAB
839 +                        AS2 (ld,%D0,Z) CR_TAB
840 +                        AS2 (subi,r30,lo8(3)) CR_TAB
841 +                        AS2 (sbci,r31,hi8(3)));
842 +              } else {
843 +                *l = 4;
844 +                return (AS2 (ld ,%A0,%1)   CR_TAB
845 +                        AS2 (ldd,%B0,%1+1) CR_TAB
846 +                        AS2 (ldd,%C0,%1+2) CR_TAB
847 +                        AS2 (ldd,%D0,%1+3));
848 +              }
849 +            }
850 +          }
851          }
852      }
853    else if (GET_CODE (base) == PLUS) /* (R + i) */
854 @@ -2677,44 +2845,54 @@ out_movsi_r_mr (rtx insn, rtx op[], int 
855           if (REGNO (XEXP (base, 0)) != REG_Y)
856             fatal_insn ("incorrect insn:",insn);
857  
858 -         if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
859 -           return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-60))) CR_TAB
860 -                                                 AS2 (sbci,r29,hi8(-(%o1-60))) CR_TAB
861 -                          AS2 (subi,r28,lo8(-60)) CR_TAB
862 -                          AS2 (sbci,r29,hi8(-60)) CR_TAB
863 -                                                 AS2 (ld,%A0,Y+)    CR_TAB
864 -                                                 AS2 (ld,%B0,Y+)    CR_TAB
865 -                                                 AS2 (ld,%C0,Y+)    CR_TAB
866 -                                                 AS2 (ld,%D0,Y)    CR_TAB
867 -                          AS2 (subi,r28,lo8(63)) CR_TAB
868 -                          AS2 (sbci,r29,hi8(63)) CR_TAB
869 -                          AS2 (subi,r28,lo8(%o1-60)) CR_TAB
870 -                          AS2 (sbci,r29,hi8(%o1-60)))
871 -                                                  : (AS2 (adiw,r28,%o1-60) CR_TAB
872 -                           AS2 (ldd,%A0,Y+60)    CR_TAB
873 -                           AS2 (ldd,%B0,Y+61)    CR_TAB
874 -                           AS2 (ldd,%C0,Y+62)    CR_TAB
875 -                           AS2 (ldd,%D0,Y+63)    CR_TAB
876 -                           AS2 (sbiw,r28,%o1-60));
877 -
878 -         return *l = 8, AVR_TINY ? (AS2 (subi,r28,lo8(-%o1)) CR_TAB
879 -                                               AS2 (sbci,r29,hi8(-%o1)) CR_TAB
880 -                                               AS2 (ld,%A0,Y+)           CR_TAB
881 -                                               AS2 (ld,%B0,Y+)           CR_TAB
882 -                                               AS2 (ld,%C0,Y+)           CR_TAB
883 -                                               AS2 (ld,%D0,Y)           CR_TAB
884 -                        AS2 (subi,r28,lo8(3))   CR_TAB
885 -                        AS2 (sbci,r29,hi8(3))   CR_TAB
886 -                                               AS2 (subi,r28,lo8(%o1))  CR_TAB
887 -                                               AS2 (sbci,r29,hi8(%o1)))
888 -                                    : (AS2 (subi,r28,lo8(-%o1)) CR_TAB
889 -                         AS2 (sbci,r29,hi8(-%o1)) CR_TAB
890 -                         AS2 (ld,%A0,Y)           CR_TAB
891 -                         AS2 (ldd,%B0,Y+1)        CR_TAB
892 -                         AS2 (ldd,%C0,Y+2)        CR_TAB
893 -                         AS2 (ldd,%D0,Y+3)        CR_TAB
894 -                         AS2 (subi,r28,lo8(%o1))  CR_TAB
895 -                         AS2 (sbci,r29,hi8(%o1)));
896 +         if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))) {
897 +           if (AVR_TINY) {
898 +              *l = 12;
899 +              return (AS2 (subi,r28,lo8(-(%o1-60))) CR_TAB
900 +                      AS2 (sbci,r29,hi8(-(%o1-60))) CR_TAB
901 +                      AS2 (subi,r28,lo8(-60)) CR_TAB
902 +                      AS2 (sbci,r29,hi8(-60)) CR_TAB
903 +                      AS2 (ld,%A0,Y+)    CR_TAB
904 +                      AS2 (ld,%B0,Y+)    CR_TAB
905 +                     AS2 (ld,%C0,Y+)    CR_TAB
906 +                      AS2 (ld,%D0,Y)    CR_TAB
907 +                      AS2 (subi,r28,lo8(63)) CR_TAB
908 +                      AS2 (sbci,r29,hi8(63)) CR_TAB
909 +                      AS2 (subi,r28,lo8(%o1-60)) CR_TAB
910 +                      AS2 (sbci,r29,hi8(%o1-60)));
911 +            } else {
912 +              *l = 6;
913 +              return (AS2 (adiw,r28,%o1-60) CR_TAB
914 +                      AS2 (ldd,%A0,Y+60)    CR_TAB
915 +                      AS2 (ldd,%B0,Y+61)    CR_TAB
916 +                      AS2 (ldd,%C0,Y+62)    CR_TAB
917 +                      AS2 (ldd,%D0,Y+63)    CR_TAB
918 +                      AS2 (sbiw,r28,%o1-60));
919 +            }
920 +          }
921 +          if (AVR_TINY) {
922 +            *l = 10;
923 +            return (AS2 (subi,r28,lo8(-%o1)) CR_TAB
924 +                    AS2 (sbci,r29,hi8(-%o1)) CR_TAB
925 +                    AS2 (ld,%A0,Y+)          CR_TAB
926 +                    AS2 (ld,%B0,Y+)          CR_TAB
927 +                    AS2 (ld,%C0,Y+)          CR_TAB
928 +                    AS2 (ld,%D0,Y)           CR_TAB
929 +                    AS2 (subi,r28,lo8(3))    CR_TAB
930 +                    AS2 (sbci,r29,hi8(3))    CR_TAB
931 +                    AS2 (subi,r28,lo8(%o1))  CR_TAB
932 +                    AS2 (sbci,r29,hi8(%o1)));
933 +          } else {
934 +            *l = 8;
935 +            return (AS2 (subi,r28,lo8(-%o1)) CR_TAB
936 +                    AS2 (sbci,r29,hi8(-%o1)) CR_TAB
937 +                    AS2 (ld,%A0,Y)           CR_TAB
938 +                    AS2 (ldd,%B0,Y+1)        CR_TAB
939 +                    AS2 (ldd,%C0,Y+2)        CR_TAB
940 +                    AS2 (ldd,%D0,Y+3)        CR_TAB
941 +                    AS2 (subi,r28,lo8(%o1))  CR_TAB
942 +                    AS2 (sbci,r29,hi8(%o1)));
943 +          }
944         }
945  
946        reg_base = true_regnum (XEXP (base, 0));
947 @@ -2722,154 +2900,204 @@ out_movsi_r_mr (rtx insn, rtx op[], int 
948         {
949           /* R = (X + d) */
950           if (reg_dest == REG_X)
951 -           {
952 -             *l = 7;
953 -             /* "ld r26,-X" is undefined */
954 -             return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1+3)))  CR_TAB
955 -                                       AS2 (sbci,r27,hi8(-(%o1+3)))  CR_TAB
956 -                                       AS2 (ld,r29,X)          CR_TAB
957 -                           AS2 (ld,r28,-X)         CR_TAB
958 -                           AS2 (ld,__tmp_reg__,-X) CR_TAB
959 -                    AS2 (subi,r26,lo8(1))   CR_TAB
960 -                    AS2 (sbci,r27,hi8(1))   CR_TAB
961 -                           AS2 (ld,r26,X)          CR_TAB
962 -                           AS2 (mov,r27,__tmp_reg__))
963 -                            : (AS2 (adiw,r26,%o1+3)    CR_TAB
964 -                     AS2 (ld,r29,X)          CR_TAB
965 -                     AS2 (ld,r28,-X)         CR_TAB
966 -                     AS2 (ld,__tmp_reg__,-X) CR_TAB
967 -                     AS2 (sbiw,r26,1)        CR_TAB
968 -                     AS2 (ld,r26,X)          CR_TAB
969 -                     AS2 (mov,r27,__tmp_reg__));
970 -           }
971 -         *l = 6;
972 -         if (reg_dest == REG_X - 2)
973 -           return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
974 -                                 AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
975 -                         AS2 (ld,r24,X+)         CR_TAB
976 -                         AS2 (ld,r25,X+)         CR_TAB
977 -                         AS2 (ld,__tmp_reg__,X+) CR_TAB
978 -                         AS2 (ld,r27,X)          CR_TAB
979 -                         AS2 (mov,r26,__tmp_reg__))
980 -                          : (AS2 (adiw,r26,%o1)      CR_TAB
981 -                   AS2 (ld,r24,X+)         CR_TAB
982 -                   AS2 (ld,r25,X+)         CR_TAB
983 -                   AS2 (ld,__tmp_reg__,X+) CR_TAB
984 -                   AS2 (ld,r27,X)          CR_TAB
985 -                   AS2 (mov,r26,__tmp_reg__));
986 -
987 -         return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
988 -                           AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
989 -                       AS2 (ld,%A0,X+)    CR_TAB
990 -                       AS2 (ld,%B0,X+)    CR_TAB
991 -                       AS2 (ld,%C0,X+)    CR_TAB
992 -                       AS2 (ld,%D0,X)     CR_TAB
993 -                       AS2 (subi,r26,lo8(%o1+3)) CR_TAB
994 -                               AS2 (sbci,r27,hi8(%o1+3)))
995 -                                : (AS2 (adiw,r26,%o1) CR_TAB
996 -                 AS2 (ld,%A0,X+)    CR_TAB
997 -                 AS2 (ld,%B0,X+)    CR_TAB
998 -                 AS2 (ld,%C0,X+)    CR_TAB
999 -                 AS2 (ld,%D0,X)     CR_TAB
1000 -                 AS2 (sbiw,r26,%o1+3));
1001 +         {
1002 +           /* "ld r26,-X" is undefined */
1003 +            if (AVR_TINY) {
1004 +              *l = 9;
1005 +              return (AS2 (subi,r26,lo8(-(%o1+3)))  CR_TAB
1006 +                      AS2 (sbci,r27,hi8(-(%o1+3)))  CR_TAB
1007 +                      AS2 (ld,r29,X)          CR_TAB
1008 +                      AS2 (ld,r28,-X)         CR_TAB
1009 +                      AS2 (ld,__tmp_reg__,-X) CR_TAB
1010 +                      AS2 (subi,r26,lo8(1))   CR_TAB
1011 +                      AS2 (sbci,r27,hi8(1))   CR_TAB
1012 +                      AS2 (ld,r26,X)          CR_TAB
1013 +                      AS2 (mov,r27,__tmp_reg__));
1014 +            } else {
1015 +              *l = 7;
1016 +              return (AS2 (adiw,r26,%o1+3)    CR_TAB
1017 +                      AS2 (ld,r29,X)          CR_TAB
1018 +                      AS2 (ld,r28,-X)         CR_TAB
1019 +                      AS2 (ld,__tmp_reg__,-X) CR_TAB
1020 +                      AS2 (sbiw,r26,1)        CR_TAB
1021 +                      AS2 (ld,r26,X)          CR_TAB
1022 +                      AS2 (mov,r27,__tmp_reg__));
1023 +            }
1024 +         }
1025 +
1026 +          if (reg_dest == REG_X - 2) {
1027 +            if (AVR_TINY) {
1028 +              *l = 7;
1029 +              return (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
1030 +                      AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
1031 +                      AS2 (ld,r24,X+)         CR_TAB
1032 +                      AS2 (ld,r25,X+)         CR_TAB
1033 +                      AS2 (ld,__tmp_reg__,X+) CR_TAB
1034 +                      AS2 (ld,r27,X)          CR_TAB
1035 +                      AS2 (mov,r26,__tmp_reg__));
1036 +            } else {
1037 +              *l = 6;
1038 +              return (AS2 (adiw,r26,%o1)      CR_TAB
1039 +                     AS2 (ld,r24,X+)         CR_TAB
1040 +                     AS2 (ld,r25,X+)         CR_TAB
1041 +                     AS2 (ld,__tmp_reg__,X+) CR_TAB
1042 +                     AS2 (ld,r27,X)          CR_TAB
1043 +                     AS2 (mov,r26,__tmp_reg__));
1044 +            }
1045 +          }
1046 +          
1047 +          if (AVR_TINY) {
1048 +            *l = 8;
1049 +            return (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
1050 +                    AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
1051 +                    AS2 (ld,%A0,X+)    CR_TAB
1052 +                    AS2 (ld,%B0,X+)    CR_TAB
1053 +                    AS2 (ld,%C0,X+)    CR_TAB
1054 +                    AS2 (ld,%D0,X)     CR_TAB
1055 +                    AS2 (subi,r26,lo8(%o1+3)) CR_TAB
1056 +                    AS2 (sbci,r27,hi8(%o1+3)));
1057 +          } else {
1058 +            *l = 6;
1059 +            return (AS2 (adiw,r26,%o1) CR_TAB
1060 +                    AS2 (ld,%A0,X+)    CR_TAB
1061 +                    AS2 (ld,%B0,X+)    CR_TAB
1062 +                    AS2 (ld,%C0,X+)    CR_TAB
1063 +                    AS2 (ld,%D0,X)     CR_TAB
1064 +                    AS2 (sbiw,r26,%o1+3));
1065 +          }
1066         }
1067        if (reg_dest == reg_base)
1068 -         {
1069 -                       op[2] = XEXP(base, 0);
1070 +      {
1071 +        op[2] = XEXP(base, 0);
1072  
1073 -               if(REGNO(op[2]) == REG_Y)
1074 -        return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1+4))) CR_TAB
1075 -                                               AS2 (sbci,%B2,hi8(-(%o1+4))) CR_TAB
1076 -                                               AS2 (ld,%D0,-Y)               CR_TAB
1077 -                               AS2 (ld,%C0,-Y)               CR_TAB
1078 -                               AS2 (ld,__tmp_reg__,-Y)       CR_TAB
1079 -                               AS2 (ld,%A0,-Y)               CR_TAB
1080 -                        AS2 (subi,%A2,lo8(%o1)) CR_TAB
1081 -                                               AS2 (sbci,%B2,hi8(%o1)) CR_TAB
1082 -                        AS2 (mov,%B0,__tmp_reg__))
1083 -                                    : (AS2 (ldd,%D0,%D1) CR_TAB
1084 -                        AS2 (ldd,%C0,%C1) CR_TAB
1085 -                        AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
1086 -                        AS2 (ldd,%A0,%A1) CR_TAB
1087 -                        AS2 (mov,%B0,__tmp_reg__));
1088 -               if(REGNO(op[2]) == REG_Z)
1089 -        return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1+4))) CR_TAB
1090 -                                               AS2 (sbci,%B2,hi8(-(%o1+4))) CR_TAB
1091 -                                               AS2 (ld,%D0,-Z)               CR_TAB
1092 -                               AS2 (ld,%C0,-Z)               CR_TAB
1093 -                               AS2 (ld,__tmp_reg__,-Z)       CR_TAB
1094 -                               AS2 (ld,%A0,-Z)               CR_TAB
1095 -                        AS2 (subi,%A2,lo8(%o1)) CR_TAB
1096 -                                               AS2 (sbci,%B2,hi8(%o1)) CR_TAB
1097 -                        AS2 (mov,%B0,__tmp_reg__))
1098 -                                    : (AS2 (ldd,%D0,%D1)                       CR_TAB
1099 -                      AS2 (ldd,%C0,%C1) CR_TAB
1100 -                      AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
1101 -                      AS2 (ldd,%A0,%A1) CR_TAB
1102 -                      AS2 (mov,%B0,__tmp_reg__));
1103 -         }
1104 -      else if (reg_dest == reg_base - 2)
1105 -         {
1106 -                       op[2] = XEXP(base, 0);
1107 +        if(REGNO(op[2]) == REG_Y) {
1108 +          if (AVR_TINY) {
1109 +            *l = 9;
1110 +            return (AS2 (subi,%A2,lo8(-(%o1+4))) CR_TAB
1111 +                    AS2 (sbci,%B2,hi8(-(%o1+4))) CR_TAB
1112 +                    AS2 (ld,%D0,-Y)              CR_TAB
1113 +                    AS2 (ld,%C0,-Y)              CR_TAB
1114 +                    AS2 (ld,__tmp_reg__,-Y)      CR_TAB
1115 +                    AS2 (ld,%A0,-Y)              CR_TAB
1116 +                    AS2 (subi,%A2,lo8(%o1))      CR_TAB
1117 +                    AS2 (sbci,%B2,hi8(%o1)) CR_TAB
1118 +                    AS2 (mov,%B0,__tmp_reg__));
1119 +          } else {
1120 +            *l = 5;
1121 +            return (AS2 (ldd,%D0,%D1) CR_TAB
1122 +                    AS2 (ldd,%C0,%C1) CR_TAB
1123 +                    AS2 (ldd,__tmp_reg__,%B1) CR_TAB
1124 +                    AS2 (ldd,%A0,%A1) CR_TAB
1125 +                    AS2 (mov,%B0,__tmp_reg__));
1126 +          }
1127 +        }
1128 +        if(REGNO(op[2]) == REG_Z) {
1129 +          if (AVR_TINY) {
1130 +            *l = 9;
1131 +            return (AS2 (subi,%A2,lo8(-(%o1+4))) CR_TAB
1132 +                    AS2 (sbci,%B2,hi8(-(%o1+4))) CR_TAB
1133 +                    AS2 (ld,%D0,-Z)              CR_TAB
1134 +                    AS2 (ld,%C0,-Z)              CR_TAB
1135 +                    AS2 (ld,__tmp_reg__,-Z)      CR_TAB
1136 +                    AS2 (ld,%A0,-Z)              CR_TAB
1137 +                    AS2 (subi,%A2,lo8(%o1)) CR_TAB
1138 +                    AS2 (sbci,%B2,hi8(%o1)) CR_TAB
1139 +                    AS2 (mov,%B0,__tmp_reg__));
1140 +          } else {
1141 +            *l = 5;
1142 +            return (AS2 (ldd,%D0,%D1) CR_TAB
1143 +                    AS2 (ldd,%C0,%C1) CR_TAB
1144 +                    AS2 (ldd,__tmp_reg__,%B1) CR_TAB
1145 +                    AS2 (ldd,%A0,%A1) CR_TAB
1146 +                    AS2 (mov,%B0,__tmp_reg__));
1147 +          }
1148 +        }
1149 +      } else if (reg_dest == reg_base - 2) {
1150 +        op[2] = XEXP(base, 0);
1151  
1152 -               if(REGNO(op[2]) == REG_Y)
1153 -        return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1)))   CR_TAB
1154 -                                               AS2 (sbci,%B2,hi8(-(%o1)))   CR_TAB
1155 -                               AS2 (ld,%A0,Y+)               CR_TAB
1156 -                               AS2 (ld,%B0,Y+)               CR_TAB
1157 -                               AS2 (ld,__tmp_reg__,Y+)       CR_TAB
1158 -                                               AS2 (ld,%D0,Y)               CR_TAB
1159 -                                               AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
1160 -                                               AS2 (sbci,%B2,hi8(%o1+3)) CR_TAB
1161 -                        AS2 (mov,%C0,__tmp_reg__)) 
1162 -                                    : (AS2 (ldd,%A0,%A1)          CR_TAB
1163 -                      AS2 (ldd,%B0,%B1) CR_TAB
1164 -                      AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
1165 -                      AS2 (ldd,%D0,%D1) CR_TAB
1166 -                      AS2 (mov,%C0,__tmp_reg__));
1167 -               if(REGNO(op[2]) == REG_Z)
1168 -        return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1)))   CR_TAB
1169 -                                               AS2 (sbci,%B2,hi8(-(%o1)))   CR_TAB
1170 -                               AS2 (ld,%A0,Z+)              CR_TAB
1171 -                               AS2 (ld,%B0,Z+)              CR_TAB
1172 -                               AS2 (ld,__tmp_reg__,Z+)      CR_TAB
1173 -                                               AS2 (ld,%D0,Z)               CR_TAB
1174 -                                               AS2 (subi,%A2,lo8(%o1+3))    CR_TAB
1175 -                                               AS2 (sbci,%B2,hi8(%o1+3))    CR_TAB
1176 -                        AS2 (mov,%C0,__tmp_reg__)) 
1177 -                                    : (AS2 (ldd,%A0,%A1)          CR_TAB
1178 -                        AS2 (ldd,%B0,%B1)          CR_TAB
1179 -                        AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
1180 -                        AS2 (ldd,%D0,%D1)          CR_TAB
1181 -                        AS2 (mov,%C0,__tmp_reg__));
1182 -         }
1183 -                       op[2] = XEXP(base, 0);
1184 -               if(REGNO(op[2]) == REG_Y)
1185 -        return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1)))   CR_TAB
1186 -                                               AS2 (sbci,%B2,hi8(-(%o1)))   CR_TAB
1187 -                               AS2 (ld,%A0,Y+)               CR_TAB
1188 -                               AS2 (ld,%B0,Y+)               CR_TAB
1189 -                               AS2 (ld,%C0,Y+)               CR_TAB
1190 -                                               AS2 (ld,%D0,Y)                CR_TAB
1191 -                                               AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
1192 -                                               AS2 (sbci,%B2,hi8(%o1+3))) 
1193 -                                    : (AS2 (ldd,%A0,%A1) CR_TAB
1194 -                        AS2 (ldd,%B0,%B1) CR_TAB
1195 -                        AS2 (ldd,%C0,%C1) CR_TAB
1196 -                        AS2 (ldd,%D0,%D1));
1197 -               if(REGNO(op[2]) == REG_Z)
1198 -        return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1)))   CR_TAB
1199 -                                               AS2 (sbci,%B2,hi8(-(%o1)))   CR_TAB
1200 -                               AS2 (ld,%A0,Z+)               CR_TAB
1201 -                               AS2 (ld,%B0,Z+)               CR_TAB
1202 -                               AS2 (ld,%C0,Z+)               CR_TAB
1203 -                                               AS2 (ld,%D0,Z)               CR_TAB
1204 -                                               AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
1205 -                                               AS2 (sbci,%B2,hi8(%o1+3)))
1206 -                                    : (AS2 (ldd,%A0,%A1) CR_TAB
1207 +        if(REGNO(op[2]) == REG_Y) {
1208 +          if (AVR_TINY) {
1209 +            *l = 9;
1210 +            return (AS2 (subi,%A2,lo8(-(%o1)))   CR_TAB
1211 +                    AS2 (sbci,%B2,hi8(-(%o1)))   CR_TAB
1212 +                    AS2 (ld,%A0,Y+)              CR_TAB
1213 +                    AS2 (ld,%B0,Y+)              CR_TAB
1214 +                    AS2 (ld,__tmp_reg__,Y+)      CR_TAB
1215 +                    AS2 (ld,%D0,Y)               CR_TAB
1216 +                    AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
1217 +                    AS2 (sbci,%B2,hi8(%o1+3)) CR_TAB
1218 +                    AS2 (mov,%C0,__tmp_reg__));
1219 +          } else {
1220 +            *l = 5;
1221 +            return (AS2 (ldd,%A0,%A1) CR_TAB
1222                      AS2 (ldd,%B0,%B1) CR_TAB
1223 -                    AS2 (ldd,%C0,%C1) CR_TAB
1224 -                    AS2 (ldd,%D0,%D1));
1225 +                    AS2 (ldd,__tmp_reg__,%C1) CR_TAB
1226 +                    AS2 (ldd,%D0,%D1) CR_TAB
1227 +                    AS2 (mov,%C0,__tmp_reg__));
1228 +          }
1229 +        }
1230 +        if(REGNO(op[2]) == REG_Z) {
1231 +          if (AVR_TINY) {
1232 +            *l = 9;
1233 +            return (AS2 (subi,%A2,lo8(-(%o1)))   CR_TAB
1234 +                    AS2 (sbci,%B2,hi8(-(%o1)))   CR_TAB
1235 +                    AS2 (ld,%A0,Z+)              CR_TAB
1236 +                    AS2 (ld,%B0,Z+)              CR_TAB
1237 +                    AS2 (ld,__tmp_reg__,Z+)      CR_TAB
1238 +                    AS2 (ld,%D0,Z)               CR_TAB
1239 +                    AS2 (subi,%A2,lo8(%o1+3))    CR_TAB
1240 +                    AS2 (sbci,%B2,hi8(%o1+3))    CR_TAB
1241 +                    AS2 (mov,%C0,__tmp_reg__));
1242 +          } else {
1243 +            *l = 5;
1244 +            return (AS2 (ldd,%A0,%A1)          CR_TAB
1245 +                    AS2 (ldd,%B0,%B1)          CR_TAB
1246 +                    AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
1247 +                    AS2 (ldd,%D0,%D1)          CR_TAB
1248 +                    AS2 (mov,%C0,__tmp_reg__));
1249 +          }
1250 +        }
1251 +      }
1252 +
1253 +      op[2] = XEXP(base, 0);
1254 +      if(REGNO(op[2]) == REG_Y) {
1255 +        if (AVR_TINY) {
1256 +          *l = 8;
1257 +          return (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
1258 +                  AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
1259 +                  AS2 (ld,%A0,Y+)            CR_TAB
1260 +                  AS2 (ld,%B0,Y+)            CR_TAB
1261 +                  AS2 (ld,%C0,Y+)            CR_TAB
1262 +                  AS2 (ld,%D0,Y)             CR_TAB
1263 +                  AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
1264 +                  AS2 (sbci,%B2,hi8(%o1+3)));
1265 +        } else {
1266 +          *l = 4;
1267 +          return (AS2 (ldd,%A0,%A1) CR_TAB
1268 +                  AS2 (ldd,%B0,%B1) CR_TAB
1269 +                  AS2 (ldd,%C0,%C1) CR_TAB
1270 +                  AS2 (ldd,%D0,%D1));
1271 +        }
1272 +      }
1273 +      if(REGNO(op[2]) == REG_Z) {
1274 +        if (AVR_TINY) {
1275 +          *l = 8;
1276 +          return (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
1277 +                  AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
1278 +                  AS2 (ld,%A0,Z+)            CR_TAB
1279 +                  AS2 (ld,%B0,Z+)            CR_TAB
1280 +                  AS2 (ld,%C0,Z+)            CR_TAB
1281 +                  AS2 (ld,%D0,Z)             CR_TAB
1282 +                  AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
1283 +                  AS2 (sbci,%B2,hi8(%o1+3)));
1284 +        } else {
1285 +          *l = 4;
1286 +          return (AS2 (ldd,%A0,%A1) CR_TAB
1287 +                  AS2 (ldd,%B0,%B1) CR_TAB
1288 +                  AS2 (ldd,%C0,%C1) CR_TAB
1289 +                  AS2 (ldd,%D0,%D1));
1290 +        }
1291 +      }
1292      }
1293    else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1294      return *l=4, (AS2 (ld,%D0,%1) CR_TAB
1295 @@ -2916,37 +3144,48 @@ out_movsi_mr_r (rtx insn, rtx op[], int 
1296            if (reg_src == REG_X)
1297              {
1298               /* "st X+,r26" is undefined */
1299 -              if (reg_unused_after (insn, base))
1300 -               return *l=6, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
1301 -                                   AS2 (st,X,r26)            CR_TAB
1302 -                                   AS2 (subi,r26,lo8(-1))    CR_TAB
1303 -                                   AS2 (sbci,r27,hi8(-1))    CR_TAB
1304 -                                   AS2 (st,X+,__tmp_reg__)   CR_TAB
1305 -                                   AS2 (st,X+,r28)           CR_TAB
1306 -                                   AS2 (st,X,r29))
1307 -                                    : (AS2 (mov,__tmp_reg__,r27) CR_TAB
1308 -                             AS2 (st,X,r26)            CR_TAB
1309 -                             AS2 (adiw,r26,1)          CR_TAB
1310 -                             AS2 (st,X+,__tmp_reg__)   CR_TAB
1311 -                             AS2 (st,X+,r28)           CR_TAB
1312 -                             AS2 (st,X,r29));
1313 -              else
1314 -        return *l=7, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
1315 -                                   AS2 (st,X,r26)            CR_TAB
1316 -                                   AS2 (subi,r26,lo8(-1))    CR_TAB
1317 -                                   AS2 (sbci,r27,hi8(-1))    CR_TAB
1318 -                                   AS2 (st,X+,__tmp_reg__)   CR_TAB
1319 -                                   AS2 (st,X+,r28)           CR_TAB
1320 -                                   AS2 (st,X,r29)            CR_TAB
1321 -                                   AS2 (subi,r26,lo8(3))     CR_TAB
1322 -                                   AS2 (sbci,r27,hi8(3)))
1323 -                                        : (AS2 (mov,__tmp_reg__,r27) CR_TAB
1324 -                             AS2 (st,X,r26)            CR_TAB
1325 -                             AS2 (adiw,r26,1)          CR_TAB
1326 -                             AS2 (st,X+,__tmp_reg__)   CR_TAB
1327 -                             AS2 (st,X+,r28)           CR_TAB
1328 -                             AS2 (st,X,r29)            CR_TAB
1329 -                             AS2 (sbiw,r26,3));
1330 +              if (reg_unused_after (insn, base)) {
1331 +                if (AVR_TINY) {
1332 +                  *l = 7;
1333 +                  return (AS2 (mov,__tmp_reg__,r27) CR_TAB
1334 +                          AS2 (st,X,r26)            CR_TAB
1335 +                          AS2 (subi,r26,lo8(-1))    CR_TAB
1336 +                          AS2 (sbci,r27,hi8(-1))    CR_TAB
1337 +                          AS2 (st,X+,__tmp_reg__)   CR_TAB
1338 +                          AS2 (st,X+,r28)           CR_TAB
1339 +                          AS2 (st,X,r29));
1340 +                } else {
1341 +                  *l = 6;
1342 +                  return (AS2 (mov,__tmp_reg__,r27) CR_TAB
1343 +                          AS2 (st,X,r26)            CR_TAB
1344 +                          AS2 (adiw,r26,1)          CR_TAB
1345 +                          AS2 (st,X+,__tmp_reg__)   CR_TAB
1346 +                          AS2 (st,X+,r28)           CR_TAB
1347 +                          AS2 (st,X,r29));
1348 +                }
1349 +              } else {
1350 +                if (AVR_TINY) {
1351 +                  *l = 9;
1352 +                  return (AS2 (mov,__tmp_reg__,r27) CR_TAB
1353 +                          AS2 (st,X,r26)            CR_TAB
1354 +                          AS2 (subi,r26,lo8(-1))    CR_TAB
1355 +                          AS2 (sbci,r27,hi8(-1))    CR_TAB
1356 +                          AS2 (st,X+,__tmp_reg__)   CR_TAB
1357 +                          AS2 (st,X+,r28)           CR_TAB
1358 +                          AS2 (st,X,r29)            CR_TAB
1359 +                          AS2 (subi,r26,lo8(3))     CR_TAB
1360 +                          AS2 (sbci,r27,hi8(3)));
1361 +                } else {
1362 +                  *l = 7;
1363 +                  return (AS2 (mov,__tmp_reg__,r27) CR_TAB
1364 +                          AS2 (st,X,r26)            CR_TAB
1365 +                          AS2 (adiw,r26,1)          CR_TAB
1366 +                          AS2 (st,X+,__tmp_reg__)   CR_TAB
1367 +                          AS2 (st,X+,r28)           CR_TAB
1368 +                          AS2 (st,X,r29)            CR_TAB
1369 +                          AS2 (sbiw,r26,3));
1370 +                }
1371 +              }
1372              }
1373            else if (reg_base == reg_src + 2)
1374              {
1375 @@ -2958,61 +3197,84 @@ out_movsi_mr_r (rtx insn, rtx op[], int 
1376                                AS2 (st,%0+,__zero_reg__)  CR_TAB
1377                                AS2 (st,%0,__tmp_reg__)   CR_TAB
1378                                AS1 (clr,__zero_reg__));
1379 -              else
1380 -                return *l=8, AVR_TINY ? (AS2 (mov,__zero_reg__,%C1) CR_TAB
1381 -                                AS2 (mov,__tmp_reg__,%D1)  CR_TAB
1382 -                                AS2 (st,%0+,%A1)           CR_TAB
1383 -                                AS2 (st,%0+,%B1)           CR_TAB
1384 -                                AS2 (st,%0+,__zero_reg__)  CR_TAB
1385 -                                AS2 (st,%0,__tmp_reg__)    CR_TAB
1386 -                                AS1 (clr,__zero_reg__)     CR_TAB
1387 -                                           AS2 (subi,r26,lo8(3))      CR_TAB
1388 -                                           AS2 (sbci,r27,hi8(3)))
1389 -                                                    : (AS2 (mov,__zero_reg__,%C1) CR_TAB
1390 -                              AS2 (mov,__tmp_reg__,%D1) CR_TAB
1391 -                              AS2 (st,%0+,%A1) CR_TAB
1392 -                              AS2 (st,%0+,%B1) CR_TAB
1393 -                              AS2 (st,%0+,__zero_reg__)  CR_TAB
1394 -                              AS2 (st,%0,__tmp_reg__)   CR_TAB
1395 -                              AS1 (clr,__zero_reg__)     CR_TAB
1396 -                              AS2 (sbiw,r26,3));
1397 +              else {
1398 +                if (AVR_TINY) {
1399 +                  *l = 9;
1400 +                  return (AS2 (mov,__zero_reg__,%C1) CR_TAB
1401 +                          AS2 (mov,__tmp_reg__,%D1)  CR_TAB
1402 +                          AS2 (st,%0+,%A1)           CR_TAB
1403 +                          AS2 (st,%0+,%B1)           CR_TAB
1404 +                          AS2 (st,%0+,__zero_reg__)  CR_TAB
1405 +                          AS2 (st,%0,__tmp_reg__)    CR_TAB
1406 +                          AS1 (clr,__zero_reg__)     CR_TAB
1407 +                          AS2 (subi,r26,lo8(3))      CR_TAB
1408 +                          AS2 (sbci,r27,hi8(3)));
1409 +                } else {
1410 +                  *l = 8;
1411 +                  return (AS2 (mov,__zero_reg__,%C1) CR_TAB
1412 +                          AS2 (mov,__tmp_reg__,%D1) CR_TAB
1413 +                          AS2 (st,%0+,%A1) CR_TAB
1414 +                          AS2 (st,%0+,%B1) CR_TAB
1415 +                          AS2 (st,%0+,__zero_reg__) CR_TAB
1416 +                          AS2 (st,%0,__tmp_reg__)   CR_TAB
1417 +                          AS1 (clr,__zero_reg__)    CR_TAB
1418 +                          AS2 (sbiw,r26,3));
1419 +                }
1420 +              }
1421              }
1422 -          return *l=5, AVR_TINY ? (AS2 (st,%0+,%A1)  CR_TAB
1423 -                          AS2 (st,%0+,%B1)  CR_TAB
1424 -                          AS2 (st,%0+,%C1)  CR_TAB
1425 -                          AS2 (st,%0,%D1)   CR_TAB
1426 -                                     AS2 (subi,r26,lo8(3))      CR_TAB
1427 -                                     AS2 (sbci,r27,hi8(3)))
1428 -                                          : (AS2 (st,%0+,%A1)  CR_TAB
1429 -                        AS2 (st,%0+,%B1) CR_TAB
1430 -                        AS2 (st,%0+,%C1) CR_TAB
1431 -                        AS2 (st,%0,%D1)  CR_TAB
1432 -                        AS2 (sbiw,r26,3));
1433 +          if (AVR_TINY) {
1434 +            *l = 6;
1435 +            return (AS2 (st,%0+,%A1)  CR_TAB
1436 +                    AS2 (st,%0+,%B1)  CR_TAB
1437 +                    AS2 (st,%0+,%C1)  CR_TAB
1438 +                    AS2 (st,%0,%D1)   CR_TAB
1439 +                    AS2 (subi,r26,lo8(3)) CR_TAB
1440 +                    AS2 (sbci,r27,hi8(3)));
1441 +          } else {
1442 +            *l = 5;
1443 +            return (AS2 (st,%0+,%A1) CR_TAB
1444 +                    AS2 (st,%0+,%B1) CR_TAB
1445 +                    AS2 (st,%0+,%C1) CR_TAB
1446 +                    AS2 (st,%0,%D1)  CR_TAB
1447 +                    AS2 (sbiw,r26,3));
1448 +          }
1449          }
1450        else
1451          {
1452 -                if(reg_base == REG_Y)
1453 -        return *l=4, AVR_TINY ? (AS2 (st,Y+,%A1)        CR_TAB
1454 -                               AS2 (st,Y+,%B1)        CR_TAB
1455 -                               AS2 (st,Y+,%C1)        CR_TAB
1456 -                               AS2 (st,Y,%D1)        CR_TAB
1457 -                                       AS2 (subi,r28,lo8(3)) CR_TAB
1458 -                                       AS2 (sbci,r29,lo8(3)))
1459 -                                    : (AS2 (st,%0,%A1)    CR_TAB
1460 -                               AS2 (std,%0+1,%B1) CR_TAB
1461 -                               AS2 (std,%0+2,%C1) CR_TAB
1462 -                               AS2 (std,%0+3,%D1));
1463 -                if(reg_base == REG_Z)
1464 -        return *l=4, AVR_TINY ? (AS2 (st,Z+,%A1)        CR_TAB
1465 -                               AS2 (st,Z+,%B1)        CR_TAB
1466 -                               AS2 (st,Z+,%C1)        CR_TAB
1467 -                               AS2 (st,Z,%D1)        CR_TAB
1468 -                                       AS2 (subi,r30,lo8(3)) CR_TAB
1469 -                                       AS2 (sbci,r31,lo8(3)))
1470 -                                    : (AS2 (st,%0,%A1)    CR_TAB
1471 -                     AS2 (std,%0+1,%B1) CR_TAB
1472 -                     AS2 (std,%0+2,%C1) CR_TAB
1473 -                     AS2 (std,%0+3,%D1));
1474 +        if(reg_base == REG_Y) {
1475 +          if (AVR_TINY) {
1476 +            *l = 6;
1477 +            return (AS2 (st,Y+,%A1)       CR_TAB
1478 +                    AS2 (st,Y+,%B1)       CR_TAB
1479 +                    AS2 (st,Y+,%C1)       CR_TAB
1480 +                    AS2 (st,Y,%D1)        CR_TAB
1481 +                    AS2 (subi,r28,lo8(3)) CR_TAB
1482 +                    AS2 (sbci,r29,hi8(3)));
1483 +          } else {
1484 +            *l = 4;
1485 +            return (AS2 (st,%0,%A1)    CR_TAB
1486 +                    AS2 (std,%0+1,%B1) CR_TAB
1487 +                    AS2 (std,%0+2,%C1) CR_TAB
1488 +                    AS2 (std,%0+3,%D1));
1489 +          }
1490 +        }
1491 +        if(reg_base == REG_Z) {
1492 +          if (AVR_TINY) {
1493 +            *l = 6;
1494 +            return (AS2 (st,Z+,%A1)       CR_TAB
1495 +                    AS2 (st,Z+,%B1)       CR_TAB
1496 +                    AS2 (st,Z+,%C1)       CR_TAB
1497 +                    AS2 (st,Z,%D1)        CR_TAB
1498 +                    AS2 (subi,r30,lo8(3)) CR_TAB
1499 +                    AS2 (sbci,r31,hi8(3)));
1500 +          } else {
1501 +            *l = 4;
1502 +            return (AS2 (st,%0,%A1)    CR_TAB
1503 +                    AS2 (std,%0+1,%B1) CR_TAB
1504 +                    AS2 (std,%0+2,%C1) CR_TAB
1505 +                    AS2 (std,%0+3,%D1));
1506 +          }
1507 +        }
1508      }
1509      }
1510    else if (GET_CODE (base) == PLUS) /* (R + i) */
1511 @@ -3024,62 +3286,76 @@ out_movsi_mr_r (rtx insn, rtx op[], int 
1512           if (reg_base != REG_Y)
1513             fatal_insn ("incorrect insn:",insn);
1514  
1515 -         if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
1516 -           return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-60))) CR_TAB
1517 -                                         AS2 (sbci,r29,hi8(-(%o0-60))) CR_TAB
1518 -                                         AS2 (subi,r28,lo8(-60)) CR_TAB
1519 -                                         AS2 (sbci,r29,lo8(-60)) CR_TAB
1520 -                                     AS2 (st,Y+,%A1)          CR_TAB
1521 -                                     AS2 (st,Y+,%B1)          CR_TAB
1522 -                                     AS2 (st,Y+,%C1)          CR_TAB
1523 -                                     AS2 (st,Y,%D1)          CR_TAB
1524 -                                         AS2 (subi,r28,lo8(63)) CR_TAB
1525 -                                         AS2 (sbci,r29,lo8(63)) CR_TAB
1526 -                                     AS2 (subi,r28,lo8(%o0-60)) CR_TAB 
1527 -                                     AS2 (sbci,r29,hi8(%o0-60)))  
1528 -                                      : (AS2 (adiw,r28,%o0-60) CR_TAB
1529 -                           AS2 (std,Y+60,%A1)    CR_TAB
1530 -                           AS2 (std,Y+61,%B1)    CR_TAB
1531 -                           AS2 (std,Y+62,%C1)    CR_TAB
1532 -                           AS2 (std,Y+63,%D1)    CR_TAB
1533 -                           AS2 (sbiw,r28,%o0-60));
1534 -         return *l = 8, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB
1535 -                                   AS2 (sbci,r29,hi8(-%o0)) CR_TAB
1536 -                                   AS2 (st,Y+,%A1)           CR_TAB
1537 -                                   AS2 (st,Y+,%B1)           CR_TAB
1538 -                                   AS2 (st,Y+,%C1)           CR_TAB
1539 -                                   AS2 (st,Y,%D1)           CR_TAB
1540 -                                       AS2 (subi,r28,lo8(3))   CR_TAB
1541 -                                       AS2 (sbci,r29,lo8(3))   CR_TAB
1542 -                                   AS2 (subi,r28,lo8(%o0))  CR_TAB
1543 -                                   AS2 (sbci,r29,hi8(%o0)))
1544 -                                    : (AS2 (subi,r28,lo8(-%o0)) CR_TAB
1545 -                         AS2 (sbci,r29,hi8(-%o0)) CR_TAB
1546 -                         AS2 (st,Y,%A1)           CR_TAB
1547 -                         AS2 (std,Y+1,%B1)        CR_TAB
1548 -                         AS2 (std,Y+2,%C1)        CR_TAB
1549 -                         AS2 (std,Y+3,%D1)        CR_TAB
1550 -                         AS2 (subi,r28,lo8(%o0))  CR_TAB
1551 -                         AS2 (sbci,r29,hi8(%o0)));
1552 +         if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))) {
1553 +           if (AVR_TINY) {
1554 +              *l = 12;
1555 +              return (AS2 (subi,r28,lo8(-(%o0-60))) CR_TAB
1556 +                      AS2 (sbci,r29,hi8(-(%o0-60))) CR_TAB
1557 +                      AS2 (subi,r28,lo8(-60)) CR_TAB
1558 +                      AS2 (sbci,r29,hi8(-60)) CR_TAB
1559 +                      AS2 (st,Y+,%A1)         CR_TAB
1560 +                      AS2 (st,Y+,%B1)         CR_TAB
1561 +                      AS2 (st,Y+,%C1)         CR_TAB
1562 +                      AS2 (st,Y,%D1)          CR_TAB
1563 +                      AS2 (subi,r28,lo8(63)) CR_TAB
1564 +                      AS2 (sbci,r29,hi8(63)) CR_TAB
1565 +                      AS2 (subi,r28,lo8(%o0-60)) CR_TAB
1566 +                      AS2 (sbci,r29,hi8(%o0-60)));
1567 +            } else {
1568 +              *l = 6;
1569 +              return (AS2 (adiw,r28,%o0-60) CR_TAB
1570 +                      AS2 (std,Y+60,%A1)    CR_TAB
1571 +                      AS2 (std,Y+61,%B1)    CR_TAB
1572 +                      AS2 (std,Y+62,%C1)    CR_TAB
1573 +                      AS2 (std,Y+63,%D1)    CR_TAB
1574 +                      AS2 (sbiw,r28,%o0-60));
1575 +            }
1576 +          }
1577 +          if (AVR_TINY) {
1578 +            *l = 10;
1579 +            return (AS2 (subi,r28,lo8(-%o0)) CR_TAB
1580 +                    AS2 (sbci,r29,hi8(-%o0)) CR_TAB
1581 +                    AS2 (st,Y+,%A1)          CR_TAB
1582 +                    AS2 (st,Y+,%B1)          CR_TAB
1583 +                    AS2 (st,Y+,%C1)          CR_TAB
1584 +                    AS2 (st,Y,%D1)           CR_TAB
1585 +                    AS2 (subi,r28,lo8(3))    CR_TAB
1586 +                    AS2 (sbci,r29,hi8(3))    CR_TAB
1587 +                    AS2 (subi,r28,lo8(%o0))  CR_TAB
1588 +                    AS2 (sbci,r29,hi8(%o0)));
1589 +          } else {
1590 +            *l = 8;
1591 +            return (AS2 (subi,r28,lo8(-%o0)) CR_TAB
1592 +                    AS2 (sbci,r29,hi8(-%o0)) CR_TAB
1593 +                    AS2 (st,Y,%A1)           CR_TAB
1594 +                    AS2 (std,Y+1,%B1)        CR_TAB
1595 +                    AS2 (std,Y+2,%C1)        CR_TAB
1596 +                    AS2 (std,Y+3,%D1)        CR_TAB
1597 +                    AS2 (subi,r28,lo8(%o0))  CR_TAB
1598 +                    AS2 (sbci,r29,hi8(%o0)));
1599 +          }
1600         }
1601        if (reg_base == REG_X)
1602         {
1603           /* (X + d) = R */
1604           if (reg_src == REG_X)
1605 -           {
1606 -             *l = 9;
1607 -             return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26)  CR_TAB
1608 -                           AS2 (mov,__zero_reg__,r27) CR_TAB
1609 -                       AS2 (subi,r26,lo8(-(%o0))) CR_TAB
1610 -                               AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
1611 -                           AS2 (st,X+,__tmp_reg__)    CR_TAB
1612 -                           AS2 (st,X+,__zero_reg__)   CR_TAB
1613 -                           AS2 (st,X+,r28)            CR_TAB
1614 -                           AS2 (st,X,r29)             CR_TAB
1615 -                           AS1 (clr,__zero_reg__)     CR_TAB
1616 -                                       AS2 (subi,r26,lo8(%o0+3))  CR_TAB
1617 -                           AS2 (sbci,r27,hi8(%o0+3)))
1618 -                                : (AS2 (mov,__tmp_reg__,r26)  CR_TAB
1619 +         {
1620 +            if (AVR_TINY) {
1621 +              *l = 11;
1622 +              return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
1623 +                     AS2 (mov,__zero_reg__,r27) CR_TAB
1624 +                     AS2 (subi,r26,lo8(-(%o0))) CR_TAB
1625 +                     AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
1626 +                     AS2 (st,X+,__tmp_reg__)    CR_TAB
1627 +                     AS2 (st,X+,__zero_reg__)   CR_TAB
1628 +                     AS2 (st,X+,r28)            CR_TAB
1629 +                     AS2 (st,X,r29)             CR_TAB
1630 +                     AS1 (clr,__zero_reg__)     CR_TAB
1631 +                     AS2 (subi,r26,lo8(%o0+3))  CR_TAB
1632 +                     AS2 (sbci,r27,hi8(%o0+3)));
1633 +            } else {
1634 +              *l = 9;
1635 +              return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
1636                       AS2 (mov,__zero_reg__,r27) CR_TAB
1637                       AS2 (adiw,r26,%o0)         CR_TAB
1638                       AS2 (st,X+,__tmp_reg__)    CR_TAB
1639 @@ -3088,22 +3364,26 @@ out_movsi_mr_r (rtx insn, rtx op[], int 
1640                       AS2 (st,X,r29)             CR_TAB
1641                       AS1 (clr,__zero_reg__)     CR_TAB
1642                       AS2 (sbiw,r26,%o0+3));
1643 -           }
1644 +            }
1645 +         }
1646           else if (reg_src == REG_X - 2)
1647 -           {
1648 -             *l = 9;
1649 -             return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26)  CR_TAB
1650 -                           AS2 (mov,__zero_reg__,r27) CR_TAB
1651 -                       AS2 (subi,r26,lo8(-(%o0))) CR_TAB
1652 -                               AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
1653 -                           AS2 (st,X+,r24)            CR_TAB
1654 -                           AS2 (st,X+,r25)            CR_TAB
1655 -                           AS2 (st,X+,__tmp_reg__)    CR_TAB
1656 -                           AS2 (st,X,__zero_reg__)    CR_TAB
1657 -                           AS1 (clr,__zero_reg__)     CR_TAB
1658 -                                       AS2 (subi,r26,lo8(%o0+3)) CR_TAB
1659 -                           AS2 (sbci,r27,hi8(%o0+3)))
1660 -                                : (AS2 (mov,__tmp_reg__,r26)  CR_TAB
1661 +         {
1662 +            if (AVR_TINY) {
1663 +             *l = 11;
1664 +             return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
1665 +                     AS2 (mov,__zero_reg__,r27) CR_TAB
1666 +                     AS2 (subi,r26,lo8(-(%o0))) CR_TAB
1667 +                     AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
1668 +                     AS2 (st,X+,r24)            CR_TAB
1669 +                     AS2 (st,X+,r25)            CR_TAB
1670 +                     AS2 (st,X+,__tmp_reg__)    CR_TAB
1671 +                     AS2 (st,X,__zero_reg__)    CR_TAB
1672 +                     AS1 (clr,__zero_reg__)     CR_TAB
1673 +                     AS2 (subi,r26,lo8(%o0+3)) CR_TAB
1674 +                     AS2 (sbci,r27,hi8(%o0+3)));
1675 +            } else {
1676 +              *l = 9;
1677 +              return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
1678                       AS2 (mov,__zero_reg__,r27) CR_TAB
1679                       AS2 (adiw,r26,%o0)         CR_TAB
1680                       AS2 (st,X+,r24)            CR_TAB
1681 @@ -3112,51 +3392,71 @@ out_movsi_mr_r (rtx insn, rtx op[], int 
1682                       AS2 (st,X,__zero_reg__)    CR_TAB
1683                       AS1 (clr,__zero_reg__)     CR_TAB
1684                       AS2 (sbiw,r26,%o0+3));
1685 -           }
1686 -         *l = 6;
1687 -         return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
1688 -                           AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
1689 -                       AS2 (st,X+,%A1)    CR_TAB
1690 -                       AS2 (st,X+,%B1)    CR_TAB
1691 -                       AS2 (st,X+,%C1)    CR_TAB
1692 -                       AS2 (st,X,%D1)     CR_TAB
1693 -                               AS2 (subi,r26,lo8(%o0+3)) CR_TAB
1694 -                       AS2 (sbci,r27,hi8(%o0+3)))
1695 -                        : (AS2 (adiw,r26,%o0) CR_TAB
1696 -                 AS2 (st,X+,%A1)    CR_TAB
1697 -                 AS2 (st,X+,%B1)    CR_TAB
1698 -                 AS2 (st,X+,%C1)    CR_TAB
1699 -                 AS2 (st,X,%D1)     CR_TAB
1700 -                 AS2 (sbiw,r26,%o0+3));
1701 -       }
1702 -               op[2] = XEXP(base, 0);
1703 -               if(REGNO(op[2]) == REG_Y)
1704 -      return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0)))    CR_TAB
1705 -                                         AS2 (sbci,%B2,hi8(-(%o0)))    CR_TAB
1706 -                             AS2 (st,Y+,%A1)               CR_TAB
1707 -                             AS2 (st,Y+,%B1)               CR_TAB
1708 -                             AS2 (st,Y+,%C1)               CR_TAB
1709 -                             AS2 (st,Y,%D1)                CR_TAB
1710 -                                         AS2 (subi,%A2,lo8(%o0+3))     CR_TAB
1711 -                                         AS2 (sbci,%B2,hi8(%o0+3)))
1712 -                              : (AS2 (std,%A0,%A1)    CR_TAB
1713 -                             AS2 (std,%B0,%B1)    CR_TAB
1714 -                             AS2 (std,%C0,%C1)    CR_TAB
1715 -                             AS2 (std,%D0,%D1));
1716 -
1717 -               if(REGNO(op[2]) == REG_Z)
1718 -      return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0)))    CR_TAB
1719 -                                         AS2 (sbci,%B2,hi8(-(%o0)))    CR_TAB
1720 -                             AS2 (st,Z+,%A1)              CR_TAB
1721 -                             AS2 (st,Z+,%B1)             CR_TAB
1722 -                             AS2 (st,Z+,%C1)             CR_TAB
1723 -                             AS2 (st,Z,%D1)              CR_TAB
1724 -                                         AS2 (subi,%A2,lo8(%o0+3))    CR_TAB
1725 -                                         AS2 (sbci,%B2,hi8(%o0+3)))
1726 -                              : (AS2 (std,%A0,%A1)    CR_TAB
1727 -                   AS2 (std,%B0,%B1) CR_TAB
1728 -                   AS2 (std,%C0,%C1) CR_TAB
1729 -                   AS2 (std,%D0,%D1));
1730 +            }
1731 +         }
1732 +
1733 +          if (AVR_TINY) {
1734 +            *l = 8;
1735 +            return (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
1736 +                   AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
1737 +                   AS2 (st,X+,%A1)    CR_TAB
1738 +                   AS2 (st,X+,%B1)    CR_TAB
1739 +                   AS2 (st,X+,%C1)    CR_TAB
1740 +                   AS2 (st,X,%D1)     CR_TAB
1741 +                   AS2 (subi,r26,lo8(%o0+3)) CR_TAB
1742 +                   AS2 (sbci,r27,hi8(%o0+3)));
1743 +          } else {
1744 +            *l = 6;
1745 +            return (AS2 (adiw,r26,%o0) CR_TAB
1746 +                   AS2 (st,X+,%A1)    CR_TAB
1747 +                   AS2 (st,X+,%B1)    CR_TAB
1748 +                   AS2 (st,X+,%C1)    CR_TAB
1749 +                   AS2 (st,X,%D1)     CR_TAB
1750 +                   AS2 (sbiw,r26,%o0+3));
1751 +          }
1752 +       }
1753 +        op[2] = XEXP(base, 0);
1754 +        if(REGNO(op[2]) == REG_Y)
1755 +        {
1756 +          if (AVR_TINY) {
1757 +            *l = 8;
1758 +            return (AS2 (subi,%A2,lo8(-(%o0)))    CR_TAB
1759 +                    AS2 (sbci,%B2,hi8(-(%o0)))    CR_TAB
1760 +                    AS2 (st,Y+,%A1)               CR_TAB
1761 +                    AS2 (st,Y+,%B1)               CR_TAB
1762 +                    AS2 (st,Y+,%C1)               CR_TAB
1763 +                    AS2 (st,Y,%D1)                CR_TAB
1764 +                    AS2 (subi,%A2,lo8(%o0+3))     CR_TAB
1765 +                    AS2 (sbci,%B2,hi8(%o0+3)));
1766 +          } else {
1767 +            *l = 4;
1768 +            return (AS2 (std,%A0,%A1)    CR_TAB
1769 +                    AS2 (std,%B0,%B1)    CR_TAB
1770 +                    AS2 (std,%C0,%C1)    CR_TAB
1771 +                    AS2 (std,%D0,%D1));
1772 +          }
1773 +        }
1774 +
1775 +        if(REGNO(op[2]) == REG_Z)
1776 +        {
1777 +          if (AVR_TINY) {
1778 +            *l = 8;
1779 +            return (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
1780 +                    AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
1781 +                    AS2 (st,Z+,%A1)            CR_TAB
1782 +                    AS2 (st,Z+,%B1)            CR_TAB
1783 +                    AS2 (st,Z+,%C1)            CR_TAB
1784 +                    AS2 (st,Z,%D1)             CR_TAB
1785 +                    AS2 (subi,%A2,lo8(%o0+3))  CR_TAB
1786 +                    AS2 (sbci,%B2,hi8(%o0+3)));
1787 +          } else {
1788 +            *l = 4;
1789 +            return (AS2 (std,%A0,%A1) CR_TAB
1790 +                    AS2 (std,%B0,%B1) CR_TAB
1791 +                    AS2 (std,%C0,%C1) CR_TAB
1792 +                    AS2 (std,%D0,%D1));
1793 +          }
1794 +        }
1795      }
1796    else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1797      return *l=4, (AS2 (st,%0,%D1) CR_TAB
1798 @@ -3370,18 +3670,25 @@ out_movqi_mr_r (rtx insn, rtx op[], int 
1799             fatal_insn ("incorrect insn:",insn);
1800  
1801           if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
1802 -           return *l = 3, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-63))) CR_TAB
1803 -                                         AS2 (sbci,r29,hi8(-(%o0-63))) CR_TAB
1804 -                                                 AS2 (subi,r28,lo8(-63)) CR_TAB
1805 -                                                 AS2 (sbci,r29,hi8(-63)) CR_TAB
1806 -                                     AS2 (st,Y,%1)           CR_TAB
1807 -                                                 AS2 (subi,r28,lo8(63)) CR_TAB
1808 -                                                 AS2 (sbci,r29,hi8(63)) CR_TAB
1809 -                                         AS2 (subi,r28,lo8(%o0-63)) CR_TAB
1810 -                                     AS2 (sbci,r29,hi8(%o0-63)))
1811 -                                      : (AS2 (adiw,r28,%o0-63) CR_TAB
1812 -                           AS2 (std,Y+63,%1)     CR_TAB
1813 -                           AS2 (sbiw,r28,%o0-63));
1814 +          {
1815 +            if (AVR_TINY) {
1816 +              *l = 9;
1817 +              return (AS2 (subi,r28,lo8(-(%o0-63))) CR_TAB
1818 +                      AS2 (sbci,r29,hi8(-(%o0-63))) CR_TAB
1819 +                      AS2 (subi,r28,lo8(-63)) CR_TAB
1820 +                      AS2 (sbci,r29,hi8(-63)) CR_TAB
1821 +                      AS2 (st,Y,%1)           CR_TAB
1822 +                      AS2 (subi,r28,lo8(63)) CR_TAB
1823 +                      AS2 (sbci,r29,hi8(63)) CR_TAB
1824 +                      AS2 (subi,r28,lo8(%o0-63)) CR_TAB
1825 +                      AS2 (sbci,r29,hi8(%o0-63)));
1826 +            } else {
1827 +              *l = 3;
1828 +              return (AS2 (adiw,r28,%o0-63) CR_TAB
1829 +                      AS2 (std,Y+63,%1)     CR_TAB
1830 +                      AS2 (sbiw,r28,%o0-63));
1831 +            }
1832 +          }
1833  
1834           return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
1835                           AS2 (sbci,r29,hi8(-%o0)) CR_TAB
1836 @@ -3394,60 +3701,95 @@ out_movqi_mr_r (rtx insn, rtx op[], int 
1837           if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
1838             {
1839               if (reg_unused_after (insn, XEXP (x,0)))
1840 -               return *l = 3, AVR_TINY ? (AS2 (mov,__tmp_reg__,%1) CR_TAB
1841 -                                     AS2 (subi,r26,lo8(-(%o0)))       CR_TAB
1842 -                                     AS2 (sbci,r27,hi8(-(%o0)))       CR_TAB
1843 -                                         AS2 (st,X,__tmp_reg__))
1844 -                                      : (AS2 (mov,__tmp_reg__,%1) CR_TAB
1845 -                               AS2 (adiw,r26,%o0)       CR_TAB
1846 -                               AS2 (st,X,__tmp_reg__));
1847 -
1848 -             return *l = 4, AVR_TINY ? (AS2 (mov,__tmp_reg__,%1) CR_TAB
1849 -                                       AS2 (subi,r26,lo8(-(%o0)))       CR_TAB
1850 -                                       AS2 (sbci,r27,hi8(-(%o0)))       CR_TAB
1851 -                                       AS2 (st,X,__tmp_reg__)   CR_TAB
1852 -                                       AS2 (subi,r26,lo8(%o0))       CR_TAB
1853 -                                       AS2 (sbci,r27,hi8(%o0)))
1854 -                                            : (AS2 (mov,__tmp_reg__,%1) CR_TAB
1855 -                             AS2 (adiw,r26,%o0)       CR_TAB
1856 -                             AS2 (st,X,__tmp_reg__)   CR_TAB
1857 -                             AS2 (sbiw,r26,%o0));
1858 +              {
1859 +                if (AVR_TINY) {
1860 +                  *l = 4;
1861 +                  return (AS2 (mov,__tmp_reg__,%1) CR_TAB
1862 +                          AS2 (subi,r26,lo8(-(%o0)))       CR_TAB
1863 +                          AS2 (sbci,r27,hi8(-(%o0)))       CR_TAB
1864 +                          AS2 (st,X,__tmp_reg__));
1865 +                } else {
1866 +                  *l = 3;
1867 +                  return (AS2 (mov,__tmp_reg__,%1) CR_TAB
1868 +                          AS2 (adiw,r26,%o0)       CR_TAB
1869 +                          AS2 (st,X,__tmp_reg__));
1870 +                }
1871 +              }
1872 +
1873 +              if (AVR_TINY) {
1874 +                *l = 6;
1875 +                return (AS2 (mov,__tmp_reg__,%1)   CR_TAB
1876 +                        AS2 (subi,r26,lo8(-(%o0))) CR_TAB
1877 +                        AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
1878 +                        AS2 (st,X,__tmp_reg__)     CR_TAB
1879 +                        AS2 (subi,r26,lo8(%o0))    CR_TAB
1880 +                        AS2 (sbci,r27,hi8(%o0)));
1881 +              } else {
1882 +                *l = 4;
1883 +                return (AS2 (mov,__tmp_reg__,%1) CR_TAB
1884 +                        AS2 (adiw,r26,%o0)       CR_TAB
1885 +                        AS2 (st,X,__tmp_reg__)   CR_TAB
1886 +                        AS2 (sbiw,r26,%o0));
1887 +              }
1888             }
1889           else
1890 -           {
1891 -             if (reg_unused_after (insn, XEXP (x,0)))
1892 -               return *l = 2, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0)))       CR_TAB
1893 -                                     AS2 (sbci,r27,hi8(-(%o0)))       CR_TAB
1894 -                                         AS2 (st,X,%1))
1895 -                                      : (AS2 (adiw,r26,%o0) CR_TAB
1896 -                               AS2 (st,X,%1));
1897 -
1898 -             return *l = 3, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0)))       CR_TAB
1899 -                                       AS2 (sbci,r27,hi8(-(%o0)))       CR_TAB
1900 -                                       AS2 (st,X,%1)                    CR_TAB
1901 -                                       AS2 (subi,r26,lo8(%o0))          CR_TAB
1902 -                                       AS2 (sbci,r27,hi8(%o0)))
1903 -                                                        : (AS2 (adiw,r26,%o0) CR_TAB
1904 -                             AS2 (st,X,%1)      CR_TAB
1905 -                             AS2 (sbiw,r26,%o0));
1906 -           }
1907 +         {
1908 +           if (reg_unused_after (insn, XEXP (x,0))) {
1909 +              if (AVR_TINY) {
1910 +                *l = 3;
1911 +                return (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
1912 +                        AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
1913 +                        AS2 (st,X,%1));
1914 +              } else {
1915 +                *l = 2;
1916 +                return (AS2 (adiw,r26,%o0) CR_TAB
1917 +                        AS2 (st,X,%1));
1918 +              }
1919 +            }
1920 +
1921 +            if (AVR_TINY) {
1922 +              *l = 5;
1923 +              return (AS2 (subi,r26,lo8(-(%o0)))       CR_TAB
1924 +                      AS2 (sbci,r27,hi8(-(%o0)))       CR_TAB
1925 +                      AS2 (st,X,%1)                    CR_TAB
1926 +                      AS2 (subi,r26,lo8(%o0))          CR_TAB
1927 +                      AS2 (sbci,r27,hi8(%o0)));
1928 +            } else {
1929 +              *l = 3;
1930 +              return (AS2 (adiw,r26,%o0) CR_TAB
1931 +                      AS2 (st,X,%1)      CR_TAB
1932 +                      AS2 (sbiw,r26,%o0));
1933 +            }
1934 +         }
1935         }
1936 -      *l = 1;
1937 -         op[2] = XEXP(x, 0);
1938 -         if(REGNO(op[2]) == REG_Y)
1939 -      return AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
1940 -                               AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
1941 -                       AS2 (st,Y,%1)              CR_TAB
1942 -                AS2 (subi,%A2,lo8(%o0)) CR_TAB
1943 -                               AS2 (sbci,%B2,hi8(%o0)))
1944 -                        :  AS2 (std,%0,%1);
1945 -         if(REGNO(op[2]) == REG_Z)
1946 -      return AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
1947 -                               AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
1948 -                       AS2 (st,Z,%1)              CR_TAB
1949 -                AS2 (subi,%A2,lo8(%o0)) CR_TAB
1950 -                               AS2 (sbci,%B2,hi8(%o0)))
1951 -                        :  AS2 (std,%0,%1);
1952 +
1953 +        op[2] = XEXP(x, 0);
1954 +        if(REGNO(op[2]) == REG_Y) {
1955 +          if (AVR_TINY) {
1956 +            *l = 5;
1957 +            return (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
1958 +                    AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
1959 +                    AS2 (st,Y,%1)              CR_TAB
1960 +                    AS2 (subi,%A2,lo8(%o0))    CR_TAB
1961 +                    AS2 (sbci,%B2,hi8(%o0)));
1962 +          } else {
1963 +            *l = 1;
1964 +            return (AS2 (std,%0,%1));
1965 +          }
1966 +        }
1967 +        if(REGNO(op[2]) == REG_Z) {
1968 +          if (AVR_TINY) {
1969 +            *l = 5;
1970 +            return (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
1971 +                    AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
1972 +                    AS2 (st,Z,%1)              CR_TAB
1973 +                    AS2 (subi,%A2,lo8(%o0))    CR_TAB
1974 +                    AS2 (sbci,%B2,hi8(%o0)));
1975 +          } else {
1976 +            *l = 1;
1977 +            return (AS2 (std,%0,%1));
1978 +          }
1979 +        }
1980      }
1981    *l = 1;
1982    return AS2 (st,%0,%1);
1983 @@ -3493,109 +3835,162 @@ out_movhi_mr_r (rtx insn, rtx op[], int 
1984        if (reg_base == REG_X)
1985          {
1986            if (reg_src == REG_X)
1987 -            {
1988 -              /* "st X+,r26" and "st -X,r26" are undefined.  */
1989 -              if (!mem_volatile_p && reg_unused_after (insn, src))
1990 -               return *l=4, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27)  CR_TAB
1991 -                                   AS2 (st,X,r26)             CR_TAB
1992 -                                   AS2 (subi,r26,lo8(-1))     CR_TAB
1993 -                                   AS2 (sbci,r27,hi8(-1))     CR_TAB
1994 -                                   AS2 (st,X,__tmp_reg__))
1995 -                                    : (AS2 (mov,__tmp_reg__,r27)  CR_TAB
1996 -                             AS2 (st,X,r26)            CR_TAB
1997 -                             AS2 (adiw,r26,1)          CR_TAB
1998 -                             AS2 (st,X,__tmp_reg__));
1999 -              else
2000 -               {
2001 -                  if (!AVR_XMEGA)
2002 -                   return *l=5, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
2003 -                                                       AS2 (subi,r26,lo8(-1))    CR_TAB
2004 -                                                       AS2 (sbci,r27,hi8(-1))    CR_TAB
2005 -                                                       AS2 (st,X,__tmp_reg__)    CR_TAB
2006 -                                                       AS2 (subi,r26,lo8(1))     CR_TAB
2007 -                                                       AS2 (sbci,r27,hi8(1))     CR_TAB
2008 -                                                       AS2 (st,X,r26))
2009 -                                                        : (AS2 (mov,__tmp_reg__,r27) CR_TAB
2010 -                             AS2 (adiw,r26,1)          CR_TAB
2011 -                             AS2 (st,X,__tmp_reg__)    CR_TAB
2012 -                              AS2 (sbiw,r26,1)          CR_TAB
2013 -                              AS2 (st,X,r26));
2014 -                 else
2015 -                   return *l=5, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
2016 -                                       AS2 (st,X,r26)            CR_TAB
2017 -                                                   AS2 (subi,r26,lo8(-1))    CR_TAB
2018 -                                                   AS2 (sbci,r27,hi8(-1))    CR_TAB
2019 -                                       AS2 (st,X,__tmp_reg__)    CR_TAB
2020 -                                                   AS2 (subi,r26,lo8(1))     CR_TAB
2021 -                                                   AS2 (sbci,r27,hi8(1)))
2022 -                                            : (AS2 (mov,__tmp_reg__,r27) CR_TAB
2023 -                                 AS2 (st,X,r26)            CR_TAB
2024 -                                 AS2 (adiw,r26,1)          CR_TAB
2025 -                                 AS2 (st,X,__tmp_reg__)    CR_TAB
2026 -                                 AS2 (sbiw,r26,1));
2027 -               }
2028 +          {
2029 +            /* "st X+,r26" and "st -X,r26" are undefined.  */
2030 +            if (!mem_volatile_p && reg_unused_after (insn, src)) {
2031 +              if (AVR_TINY) {
2032 +                *l = 5;
2033 +                return  (AS2 (mov,__tmp_reg__,r27)  CR_TAB
2034 +                         AS2 (st,X,r26)             CR_TAB
2035 +                         AS2 (subi,r26,lo8(-1))     CR_TAB
2036 +                         AS2 (sbci,r27,hi8(-1))     CR_TAB
2037 +                         AS2 (st,X,__tmp_reg__));
2038 +              } else {
2039 +                *l = 4;
2040 +                return (AS2 (mov,__tmp_reg__,r27)  CR_TAB
2041 +                        AS2 (st,X,r26)            CR_TAB
2042 +                        AS2 (adiw,r26,1)          CR_TAB
2043 +                        AS2 (st,X,__tmp_reg__));
2044 +              }
2045 +            }
2046 +            else
2047 +            {
2048 +              if (!AVR_XMEGA) {
2049 +                if (AVR_TINY) {
2050 +                  *l = 7;
2051 +                  return (AS2 (mov,__tmp_reg__,r27) CR_TAB
2052 +                          AS2 (subi,r26,lo8(-1))    CR_TAB
2053 +                          AS2 (sbci,r27,hi8(-1))    CR_TAB
2054 +                          AS2 (st,X,__tmp_reg__)    CR_TAB
2055 +                          AS2 (subi,r26,lo8(1))     CR_TAB
2056 +                          AS2 (sbci,r27,hi8(1))     CR_TAB
2057 +                          AS2 (st,X,r26));
2058 +                } else {
2059 +                  *l = 5;
2060 +                  return (AS2 (mov,__tmp_reg__,r27) CR_TAB
2061 +                          AS2 (adiw,r26,1)          CR_TAB
2062 +                          AS2 (st,X,__tmp_reg__)    CR_TAB
2063 +                          AS2 (sbiw,r26,1)          CR_TAB
2064 +                          AS2 (st,X,r26));
2065 +                }
2066 +              }
2067 +              else {
2068 +                if (AVR_TINY) {
2069 +                  *l = 7;
2070 +                  return (AS2 (mov,__tmp_reg__,r27) CR_TAB
2071 +                         AS2 (st,X,r26)            CR_TAB
2072 +                         AS2 (subi,r26,lo8(-1))    CR_TAB
2073 +                         AS2 (sbci,r27,hi8(-1))    CR_TAB
2074 +                         AS2 (st,X,__tmp_reg__)    CR_TAB
2075 +                         AS2 (subi,r26,lo8(1))     CR_TAB
2076 +                         AS2 (sbci,r27,hi8(1)));
2077 +                } else {
2078 +                  *l = 5;
2079 +                  return (AS2 (mov,__tmp_reg__,r27) CR_TAB
2080 +                         AS2 (st,X,r26)            CR_TAB
2081 +                         AS2 (adiw,r26,1)          CR_TAB
2082 +                         AS2 (st,X,__tmp_reg__)    CR_TAB
2083 +                         AS2 (sbiw,r26,1));
2084 +                }
2085 +              }
2086              }
2087 +          }
2088            else
2089              {
2090                if (!mem_volatile_p && reg_unused_after (insn, base))
2091                  return *l=2, (AS2 (st,X+,%A1) CR_TAB
2092                                AS2 (st,X,%B1));
2093                else
2094 -               {
2095 -                  if (!AVR_XMEGA)
2096 -                    return *l=3, AVR_TINY ? (AS2 (subi,r26,lo8(-1)) CR_TAB
2097 -                                                               AS2 (sbci,r27,hi8(-1)) CR_TAB
2098 -                                    AS2 (st,X,%B1)         CR_TAB
2099 -                                    AS2 (st,-X,%A1))
2100 -                                                            : (AS2 (adiw,r26,1) CR_TAB
2101 -                              AS2 (st,X,%B1)   CR_TAB
2102 -                              AS2 (st,-X,%A1));
2103 -                 else
2104 -                    return *l=3, AVR_TINY ? (AS2 (st,X+,%A1) CR_TAB
2105 -                                    AS2 (st,X,%B1) CR_TAB
2106 -                                                           AS2 (subi,r26,lo8(1))     CR_TAB
2107 -                                                           AS2 (sbci,r27,hi8(1)))
2108 -                                                            : (AS2 (st,X+,%A1) CR_TAB
2109 -                                  AS2 (st,X,%B1) CR_TAB
2110 -                                  AS2 (sbiw,r26,1));
2111 -               }
2112 +             {
2113 +                if (!AVR_XMEGA) {
2114 +                  if (AVR_TINY) {
2115 +                    *l = 4;
2116 +                     return (AS2 (subi,r26,lo8(-1)) CR_TAB
2117 +                             AS2 (sbci,r27,hi8(-1)) CR_TAB
2118 +                             AS2 (st,X,%B1)         CR_TAB
2119 +                             AS2 (st,-X,%A1));
2120 +                  } else {
2121 +                    *l = 3;
2122 +                    return (AS2 (adiw,r26,1) CR_TAB
2123 +                            AS2 (st,X,%B1)   CR_TAB
2124 +                            AS2 (st,-X,%A1));
2125 +                  }
2126 +                } else {
2127 +                  if (AVR_TINY) {
2128 +                    *l = 4;
2129 +                    return (AS2 (st,X+,%A1) CR_TAB
2130 +                            AS2 (st,X,%B1) CR_TAB
2131 +                            AS2 (subi,r26,lo8(1)) CR_TAB
2132 +                            AS2 (sbci,r27,hi8(1)));
2133 +                  } else {
2134 +                    *l = 3;
2135 +                    return (AS2 (st,X+,%A1) CR_TAB
2136 +                            AS2 (st,X,%B1) CR_TAB
2137 +                            AS2 (sbiw,r26,1));
2138 +                  }
2139 +                }
2140 +             }
2141              }
2142          }
2143        else
2144          {
2145           if (!AVR_XMEGA)
2146           {
2147 -                   if(reg_base == REG_Y)
2148 -            return  *l=2, AVR_TINY ? (AS2 (subi,r28,lo8(-1))  CR_TAB
2149 -                                        AS2 (sbci,r29,hi8(-1))  CR_TAB
2150 -                             AS2 (st,Y,%B1)         CR_TAB
2151 -                             AS2 (st,-Y,%A1))
2152 -                                             : (AS2 (std,%0+1,%B1)      CR_TAB
2153 -                             AS2 (st,%0,%A1));
2154 -                   if(reg_base == REG_Z)
2155 -            return  *l=2, AVR_TINY ? (AS2 (subi,r30,lo8(-1))  CR_TAB
2156 -                                        AS2 (sbci,r31,hi8(-1))  CR_TAB
2157 -                             AS2 (st,Z,%B1)         CR_TAB
2158 -                             AS2 (st,-Z,%A1))
2159 -                                             : (AS2 (std,%0+1,%B1)      CR_TAB
2160 -                       AS2 (st,%0,%A1));
2161 +            if(reg_base == REG_Y) {
2162 +              if (AVR_TINY) {
2163 +                *l = 4;
2164 +                return (AS2 (subi,r28,lo8(-1)) CR_TAB
2165 +                        AS2 (sbci,r29,hi8(-1)) CR_TAB
2166 +                        AS2 (st,Y,%B1)         CR_TAB
2167 +                        AS2 (st,-Y,%A1));
2168 +              } else {
2169 +                *l = 2;
2170 +                return (AS2 (std,%0+1,%B1)     CR_TAB
2171 +                        AS2 (st,%0,%A1));
2172 +              }
2173 +            }
2174 +            if(reg_base == REG_Z) {
2175 +              if (AVR_TINY) {
2176 +                *l = 4;
2177 +                return (AS2 (subi,r30,lo8(-1)) CR_TAB
2178 +                        AS2 (sbci,r31,hi8(-1)) CR_TAB
2179 +                        AS2 (st,Z,%B1)         CR_TAB
2180 +                        AS2 (st,-Z,%A1));
2181 +              } else {
2182 +                *l = 2;
2183 +                return (AS2 (std,%0+1,%B1)     CR_TAB
2184 +                        AS2 (st,%0,%A1));
2185 +              }
2186 +            }
2187           }
2188           else
2189           {
2190 -                   if(reg_base == REG_Y)
2191 -            return  *l=2, AVR_TINY ? (AS2 (st,Y+,%A1)        CR_TAB
2192 -                             AS2 (st,Y,%B1)        CR_TAB
2193 -                                                AS2 (subi,r28,lo8(1))  CR_TAB
2194 -                                        AS2 (sbci,r29,hi8(1)))
2195 -                                                 : (AS2 (st,%0,%A1)    CR_TAB
2196 -                             AS2 (std,%0+1,%B1));
2197 -                   if(reg_base == REG_Z)
2198 -            return  *l=2, AVR_TINY ? (AS2 (st,Z+,%A1)         CR_TAB
2199 -                             AS2 (st,Z,%B1)         CR_TAB
2200 -                                                AS2 (subi,r30,lo8(1))  CR_TAB
2201 -                                        AS2 (sbci,r31,hi8(1)))
2202 -                                                 : (AS2 (st,%0,%A1)    CR_TAB
2203 -                           AS2 (std,%0+1,%B1));
2204 +           if(reg_base == REG_Y) {
2205 +              if (AVR_TINY) {
2206 +                *l = 4;
2207 +                return (AS2 (st,Y+,%A1)       CR_TAB
2208 +                        AS2 (st,Y,%B1)        CR_TAB
2209 +                        AS2 (subi,r28,lo8(1)) CR_TAB
2210 +                        AS2 (sbci,r29,hi8(1)));
2211 +              } else {
2212 +                *l = 2;
2213 +                return (AS2 (st,%0,%A1)    CR_TAB
2214 +                        AS2 (std,%0+1,%B1));
2215 +              }
2216 +            }
2217 +            if(reg_base == REG_Z) {
2218 +              if (AVR_TINY) {
2219 +                *l = 4;
2220 +                return (AS2 (st,Z+,%A1)        CR_TAB
2221 +                        AS2 (st,Z,%B1)         CR_TAB
2222 +                        AS2 (subi,r30,lo8(1))  CR_TAB
2223 +                        AS2 (sbci,r31,hi8(1)));
2224 +              } else {
2225 +                *l = 2;
2226 +                return (AS2 (st,%0,%A1)    CR_TAB
2227 +                        AS2 (std,%0+1,%B1));
2228 +              }
2229 +            }
2230          }
2231      }
2232      }
2233 @@ -3610,69 +4005,91 @@ out_movhi_mr_r (rtx insn, rtx op[], int 
2234  
2235            if (!AVR_XMEGA)
2236              {
2237 -         if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2238 -               return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-62))) CR_TAB
2239 -                                                 AS2 (sbci,r29,hi8(-(%o0-62))) CR_TAB
2240 -                                                         AS2 (subi,r28,lo8(-63))       CR_TAB
2241 -                                                         AS2 (sbci,r29,hi8(-63))       CR_TAB
2242 -                                         AS2 (st,Y,%B1)                                 CR_TAB
2243 -                                         AS2 (st,-Y,%A1)                                CR_TAB
2244 -                                                         AS2 (subi,r28,lo8(62))       CR_TAB
2245 -                                                         AS2 (sbci,r29,hi8(62))       CR_TAB
2246 -                                         AS2 (subi,r28,lo8(%o0-62))    CR_TAB
2247 -                                         AS2 (sbci,r29,hi8(%o0-62)))
2248 -                                              : (AS2 (adiw,r28,%o0-62) CR_TAB
2249 -                           AS2 (std,Y+63,%B1)    CR_TAB
2250 -                           AS2 (std,Y+62,%A1)    CR_TAB
2251 -                           AS2 (sbiw,r28,%o0-62));
2252 -
2253 -             return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2254 -                                       AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2255 -                                                       AS2 (subi,r28,lo8(-1))   CR_TAB
2256 -                                                       AS2 (sbci,r29,hi8(-1))   CR_TAB
2257 -                                       AS2 (st,Y,%B1)           CR_TAB
2258 -                                       AS2 (st,-Y,%A1)           CR_TAB
2259 -                                       AS2 (subi,r28,lo8(%o0))  CR_TAB
2260 -                                       AS2 (sbci,r29,hi8(%o0)))
2261 -                                            : (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2262 -                         AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2263 -                         AS2 (std,Y+1,%B1)        CR_TAB
2264 -                         AS2 (st,Y,%A1)           CR_TAB
2265 -                         AS2 (subi,r28,lo8(%o0))  CR_TAB
2266 -                         AS2 (sbci,r29,hi8(%o0)));
2267 +         if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))) {
2268 +           if (AVR_TINY) {
2269 +              *l = 10;
2270 +              return (AS2 (subi,r28,lo8(-(%o0-62))) CR_TAB
2271 +                     AS2 (sbci,r29,hi8(-(%o0-62))) CR_TAB
2272 +                     AS2 (subi,r28,lo8(-63))       CR_TAB
2273 +                     AS2 (sbci,r29,hi8(-63))       CR_TAB
2274 +                     AS2 (st,Y,%B1)                CR_TAB
2275 +                     AS2 (st,-Y,%A1)               CR_TAB
2276 +                     AS2 (subi,r28,lo8(62))        CR_TAB
2277 +                     AS2 (sbci,r29,hi8(62))        CR_TAB
2278 +                     AS2 (subi,r28,lo8(%o0-62))    CR_TAB
2279 +                     AS2 (sbci,r29,hi8(%o0-62)));
2280 +            } else {
2281 +              *l = 4;
2282 +              return (AS2 (adiw,r28,%o0-62) CR_TAB
2283 +                     AS2 (std,Y+63,%B1)    CR_TAB
2284 +                     AS2 (std,Y+62,%A1)    CR_TAB
2285 +                     AS2 (sbiw,r28,%o0-62));
2286 +            }
2287 +          }
2288 +
2289 +             if (AVR_TINY) {
2290 +                *l = 8;
2291 +                return (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2292 +                       AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2293 +                       AS2 (subi,r28,lo8(-1))   CR_TAB
2294 +                       AS2 (sbci,r29,hi8(-1))   CR_TAB
2295 +                       AS2 (st,Y,%B1)           CR_TAB
2296 +                       AS2 (st,-Y,%A1)          CR_TAB
2297 +                       AS2 (subi,r28,lo8(%o0))  CR_TAB
2298 +                       AS2 (sbci,r29,hi8(%o0)));
2299 +              } else {
2300 +                *l = 6;
2301 +                return (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2302 +                       AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2303 +                       AS2 (std,Y+1,%B1)        CR_TAB
2304 +                       AS2 (st,Y,%A1)           CR_TAB
2305 +                       AS2 (subi,r28,lo8(%o0))  CR_TAB
2306 +                       AS2 (sbci,r29,hi8(%o0)));
2307 +              }
2308         }
2309           else
2310             {
2311 -             if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2312 -               return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-62))) CR_TAB
2313 -                                                 AS2 (sbci,r29,hi8(-(%o0-62))) CR_TAB
2314 -                                                         AS2 (subi,r28,lo8(-62))       CR_TAB
2315 -                                                         AS2 (sbci,r29,hi8(-62))       CR_TAB
2316 -                                         AS2 (st,Y+,%A1)                                CR_TAB
2317 -                                         AS2 (st,Y,%B1)                                 CR_TAB
2318 -                                                         AS2 (subi,r28,lo8(63))       CR_TAB
2319 -                                                         AS2 (sbci,r29,hi8(63))       CR_TAB
2320 -                                         AS2 (subi,r28,lo8(%o0-62))    CR_TAB
2321 -                                         AS2 (sbci,r29,hi8(%o0-62)))
2322 -                                              : (AS2 (adiw,r28,%o0-62) CR_TAB
2323 -                               AS2 (std,Y+62,%A1)    CR_TAB
2324 -                               AS2 (std,Y+63,%B1)    CR_TAB
2325 -                               AS2 (sbiw,r28,%o0-62));
2326 +             if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))) {
2327 +               if (AVR_TINY) {
2328 +                  *l = 10;
2329 +                  return (AS2 (subi,r28,lo8(-(%o0-62))) CR_TAB
2330 +                          AS2 (sbci,r29,hi8(-(%o0-62))) CR_TAB
2331 +                          AS2 (subi,r28,lo8(-62))       CR_TAB
2332 +                          AS2 (sbci,r29,hi8(-62))       CR_TAB
2333 +                          AS2 (st,Y+,%A1)               CR_TAB
2334 +                          AS2 (st,Y,%B1)                CR_TAB
2335 +                          AS2 (subi,r28,lo8(63))        CR_TAB
2336 +                          AS2 (sbci,r29,hi8(63))        CR_TAB
2337 +                          AS2 (subi,r28,lo8(%o0-62))    CR_TAB
2338 +                          AS2 (sbci,r29,hi8(%o0-62)));
2339 +                } else {
2340 +                  *l = 4;
2341 +                  return (AS2 (adiw,r28,%o0-62) CR_TAB
2342 +                          AS2 (std,Y+62,%A1)    CR_TAB
2343 +                          AS2 (std,Y+63,%B1)    CR_TAB
2344 +                          AS2 (sbiw,r28,%o0-62));
2345 +                }
2346 +              }
2347   
2348 -             return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2349 -                                       AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2350 -                                       AS2 (st,Y+,%A1)           CR_TAB
2351 -                                       AS2 (st,Y,%B1)           CR_TAB
2352 -                                                       AS2 (subi,r28,lo8(1))   CR_TAB
2353 -                                                       AS2 (sbci,r29,hi8(1))   CR_TAB
2354 -                                       AS2 (subi,r28,lo8(%o0))  CR_TAB
2355 -                                       AS2 (sbci,r29,hi8(%o0)))
2356 -                                            : (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2357 -                             AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2358 -                             AS2 (st,Y,%A1)           CR_TAB
2359 -                             AS2 (std,Y+1,%B1)        CR_TAB
2360 -                             AS2 (subi,r28,lo8(%o0))  CR_TAB
2361 -                             AS2 (sbci,r29,hi8(%o0)));
2362 +             if (AVR_TINY) {
2363 +                *l = 8;
2364 +                return (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2365 +                        AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2366 +                        AS2 (st,Y+,%A1)          CR_TAB
2367 +                        AS2 (st,Y,%B1)           CR_TAB
2368 +                        AS2 (subi,r28,lo8(1))    CR_TAB
2369 +                        AS2 (sbci,r29,hi8(1))    CR_TAB
2370 +                        AS2 (subi,r28,lo8(%o0))  CR_TAB
2371 +                        AS2 (sbci,r29,hi8(%o0)));
2372 +              } else {
2373 +                *l = 6;
2374 +                return (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2375 +                        AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2376 +                        AS2 (st,Y,%A1)           CR_TAB
2377 +                        AS2 (std,Y+1,%B1)        CR_TAB
2378 +                        AS2 (subi,r28,lo8(%o0))  CR_TAB
2379 +                        AS2 (sbci,r29,hi8(%o0)));
2380 +              }
2381             }
2382         }
2383        if (reg_base == REG_X)
2384 @@ -3681,121 +4098,164 @@ out_movhi_mr_r (rtx insn, rtx op[], int 
2385           if (reg_src == REG_X)
2386              {
2387               if (!AVR_XMEGA)
2388 -               {
2389 -             *l = 7;
2390 -                 return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2391 -                               AS2 (mov,__zero_reg__,r27) CR_TAB
2392 -                        AS2 (subi,r26,lo8(-(%o0+1)))   CR_TAB
2393 -                        AS2 (sbci,r27,hi8(-(%o0+1)))   CR_TAB
2394 -                               AS2 (st,X,__zero_reg__)    CR_TAB
2395 -                               AS2 (st,-X,__tmp_reg__)    CR_TAB
2396 -                               AS1 (clr,__zero_reg__)     CR_TAB
2397 -                        AS2 (subi,r26,lo8(%o0))    CR_TAB
2398 -                        AS2 (sbci,r27,hi8(%o0))) 
2399 -                                        : (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2400 -                     AS2 (mov,__zero_reg__,r27) CR_TAB
2401 -                      AS2 (adiw,r26,%o0+1)       CR_TAB
2402 -                     AS2 (st,X,__zero_reg__)    CR_TAB
2403 -                     AS2 (st,-X,__tmp_reg__)    CR_TAB
2404 -                     AS1 (clr,__zero_reg__)     CR_TAB
2405 -                      AS2 (sbiw,r26,%o0));
2406 -           }
2407 +             {
2408 +               if (AVR_TINY) {
2409 +                  *l = 9;
2410 +                  return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2411 +                          AS2 (mov,__zero_reg__,r27) CR_TAB
2412 +                          AS2 (subi,r26,lo8(-(%o0+1))) CR_TAB
2413 +                          AS2 (sbci,r27,hi8(-(%o0+1))) CR_TAB
2414 +                          AS2 (st,X,__zero_reg__)    CR_TAB
2415 +                          AS2 (st,-X,__tmp_reg__)    CR_TAB
2416 +                          AS1 (clr,__zero_reg__)     CR_TAB
2417 +                          AS2 (subi,r26,lo8(%o0))    CR_TAB
2418 +                          AS2 (sbci,r27,hi8(%o0)));
2419 +                } else {
2420 +                  *l = 7;
2421 +                  return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2422 +                          AS2 (mov,__zero_reg__,r27) CR_TAB
2423 +                          AS2 (adiw,r26,%o0+1)       CR_TAB
2424 +                          AS2 (st,X,__zero_reg__)    CR_TAB
2425 +                          AS2 (st,-X,__tmp_reg__)    CR_TAB
2426 +                          AS1 (clr,__zero_reg__)     CR_TAB
2427 +                          AS2 (sbiw,r26,%o0));
2428 +                }
2429 +              }
2430               else
2431 -               {
2432 -                 *l = 7;
2433 -                         return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2434 +             {
2435 +                if (AVR_TINY) {
2436 +                  *l = 9;
2437 +                  return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2438                           AS2 (mov,__zero_reg__,r27) CR_TAB
2439 -                                       AS2 (subi,r26,lo8(-(%o0)))   CR_TAB
2440 -                                       AS2 (sbci,r27,hi8(-(%o0)))   CR_TAB
2441 +                         AS2 (subi,r26,lo8(-(%o0)))   CR_TAB
2442 +                         AS2 (sbci,r27,hi8(-(%o0)))   CR_TAB
2443                           AS2 (st,X+,__tmp_reg__)    CR_TAB
2444                           AS2 (st,X,__zero_reg__)    CR_TAB
2445                           AS1 (clr,__zero_reg__)     CR_TAB
2446 -                                       AS2 (subi,r26,lo8(%o0+1))    CR_TAB
2447 -                                       AS2 (sbci,r27,hi8(%o0+1)))
2448 -                                        : (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2449 -                                               AS2 (mov,__zero_reg__,r27) CR_TAB
2450 -                                               AS2 (adiw,r26,%o0+1)       CR_TAB
2451 -                                               AS2 (st,X+,__tmp_reg__)    CR_TAB
2452 -                                               AS2 (st,X,__zero_reg__)    CR_TAB
2453 -                                               AS1 (clr,__zero_reg__)     CR_TAB
2454 -                                               AS2 (sbiw,r26,%o0));
2455 -
2456 -               }
2457 +                         AS2 (subi,r26,lo8(%o0+1))    CR_TAB
2458 +                         AS2 (sbci,r27,hi8(%o0+1)));
2459 +                } else {
2460 +                  *l = 7;
2461 +                  return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2462 +                          AS2 (mov,__zero_reg__,r27) CR_TAB
2463 +                          AS2 (adiw,r26,%o0+1)       CR_TAB
2464 +                          AS2 (st,X+,__tmp_reg__)    CR_TAB
2465 +                          AS2 (st,X,__zero_reg__)    CR_TAB
2466 +                          AS1 (clr,__zero_reg__)     CR_TAB
2467 +                          AS2 (sbiw,r26,%o0));
2468 +                }
2469 +             }
2470             }
2471           if (!AVR_XMEGA)
2472 -            {      
2473 -         *l = 4;
2474 -              return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0+1)))   CR_TAB
2475 -                        AS2 (sbci,r27,hi8(-(%o0+1)))   CR_TAB
2476 -                        AS2 (st,X,%B1)       CR_TAB
2477 -                        AS2 (st,-X,%A1)      CR_TAB
2478 -                        AS2 (subi,r26,lo8(%o0))    CR_TAB
2479 -                        AS2 (sbci,r27,hi8(%o0))) 
2480 -                                        : (AS2 (adiw,r26,%o0+1) CR_TAB
2481 -                  AS2 (st,X,%B1)       CR_TAB
2482 -                  AS2 (st,-X,%A1)      CR_TAB
2483 -                  AS2 (sbiw,r26,%o0));
2484 -       }
2485 +          {
2486 +            if (AVR_TINY) {
2487 +              *l = 6;
2488 +              return (AS2 (subi,r26,lo8(-(%o0+1)))   CR_TAB
2489 +                      AS2 (sbci,r27,hi8(-(%o0+1)))   CR_TAB
2490 +                      AS2 (st,X,%B1)       CR_TAB
2491 +                      AS2 (st,-X,%A1)      CR_TAB
2492 +                      AS2 (subi,r26,lo8(%o0))    CR_TAB
2493 +                      AS2 (sbci,r27,hi8(%o0)));
2494 +            } else {
2495 +              *l = 4;
2496 +              return (AS2 (adiw,r26,%o0+1) CR_TAB
2497 +                      AS2 (st,X,%B1)       CR_TAB
2498 +                      AS2 (st,-X,%A1)      CR_TAB
2499 +                      AS2 (sbiw,r26,%o0));
2500 +            }
2501 +          }
2502           else
2503 -           {
2504 -             *l = 4;
2505 -              return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0)))   CR_TAB
2506 -                        AS2 (sbci,r27,hi8(-(%o0)))   CR_TAB
2507 -                        AS2 (st,X+,%A1)       CR_TAB
2508 -                        AS2 (st,X,%B1)      CR_TAB
2509 -                        AS2 (subi,r26,lo8(%o0))    CR_TAB
2510 -                        AS2 (sbci,r27,hi8(%o0))) 
2511 -                                        : (AS2 (adiw,r26,%o0) CR_TAB
2512 +          {
2513 +            if (AVR_TINY) {
2514 +              *l = 6;
2515 +              return (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
2516 +                      AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
2517 +                      AS2 (st,X+,%A1)            CR_TAB
2518 +                      AS2 (st,X,%B1)             CR_TAB
2519 +                      AS2 (subi,r26,lo8(%o0))    CR_TAB
2520 +                      AS2 (sbci,r27,hi8(%o0)));
2521 +            } else {
2522 +              *l = 4;
2523 +              return (AS2 (adiw,r26,%o0) CR_TAB
2524                       AS2 (st,X+,%A1)    CR_TAB
2525                       AS2 (st,X,%B1)     CR_TAB
2526                       AS2 (sbiw,r26,%o0+1));
2527              }
2528 +          }
2529         }
2530         
2531        if (!AVR_XMEGA)
2532 -         {
2533 -               op[2] = XEXP(base, 0);
2534 -               if(REGNO(op[2]) == REG_Y)
2535 -        return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0+2)))    CR_TAB
2536 -                                               AS2 (sbci,%B2,hi8(-(%o0+2)))    CR_TAB
2537 -                                               AS2 (st,-Y,%B1)                  CR_TAB
2538 -                        AS2 (st,-Y,%A1)                  CR_TAB
2539 -                        AS2 (subi,%A2,lo8(%o0))    CR_TAB
2540 -                                               AS2 (sbci,%B2,hi8(%o0)))   
2541 -                                    : (AS2 (std,%B0,%B1)    CR_TAB
2542 +      {
2543 +        op[2] = XEXP(base, 0);
2544 +        if(REGNO(op[2]) == REG_Y)
2545 +        {
2546 +          if (AVR_TINY) {
2547 +            *l = 6;
2548 +            return (AS2 (subi,%A2,lo8(-(%o0+2))) CR_TAB
2549 +                    AS2 (sbci,%B2,hi8(-(%o0+2))) CR_TAB
2550 +                    AS2 (st,-Y,%B1)              CR_TAB
2551 +                    AS2 (st,-Y,%A1)              CR_TAB
2552 +                    AS2 (subi,%A2,lo8(%o0))      CR_TAB
2553 +                    AS2 (sbci,%B2,hi8(%o0)));
2554 +          } else {
2555 +            *l = 2;
2556 +            return (AS2 (std,%B0,%B1)    CR_TAB
2557                      AS2 (std,%A0,%A1));
2558 -               if(REGNO(op[2]) == REG_Z)
2559 -        return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0+2)))    CR_TAB
2560 -                                               AS2 (sbci,%B2,hi8(-(%o0+2)))    CR_TAB
2561 -                                               AS2 (st,-Z,%B1)                  CR_TAB
2562 -                        AS2 (st,-Z,%A1)                  CR_TAB
2563 -                        AS2 (subi,%A2,lo8(%o0))    CR_TAB
2564 -                                               AS2 (sbci,%B2,hi8(%o0)))   
2565 -                                    : (AS2 (std,%B0,%B1)    CR_TAB
2566 -                        AS2 (std,%A0,%A1));
2567 -         }
2568 +          }
2569 +        }
2570 +        if(REGNO(op[2]) == REG_Z)
2571 +        {
2572 +          if (AVR_TINY) {
2573 +            *l = 6;
2574 +            return (AS2 (subi,%A2,lo8(-(%o0+2))) CR_TAB
2575 +                    AS2 (sbci,%B2,hi8(-(%o0+2))) CR_TAB
2576 +                    AS2 (st,-Z,%B1)              CR_TAB
2577 +                    AS2 (st,-Z,%A1)              CR_TAB
2578 +                    AS2 (subi,%A2,lo8(%o0))      CR_TAB
2579 +                    AS2 (sbci,%B2,hi8(%o0)));
2580 +          } else {
2581 +            *l = 2;
2582 +            return (AS2 (std,%B0,%B1)    CR_TAB
2583 +                    AS2 (std,%A0,%A1));
2584 +          }
2585 +        }
2586 +      }
2587        else
2588 -         {
2589 -               op[2] = XEXP(base, 0);
2590 -               if(REGNO(op[2]) == REG_Y)
2591 -        return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0)))    CR_TAB
2592 -                                               AS2 (sbci,%B2,hi8(-(%o0)))    CR_TAB
2593 -                        AS2 (st,Y+,%A1)                CR_TAB
2594 -                                               AS2 (st,Y,%B1)                 CR_TAB                 
2595 -                                       AS2 (subi,%A2,lo8(%o0+1))    CR_TAB
2596 -                                               AS2 (sbci,%B2,hi8(%o0+1)))
2597 -                         : (AS2 (std,%A0,%A1)    CR_TAB
2598 -                     AS2 (std,%B0,%B1));
2599 -               if(REGNO(op[2]) == REG_Z)
2600 -        return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0)))    CR_TAB
2601 -                                               AS2 (sbci,%B2,hi8(-(%o0)))    CR_TAB
2602 -                        AS2 (st,Z+,%A1)                CR_TAB
2603 -                                               AS2 (st,Z,%B1)                CR_TAB 
2604 -                                       AS2 (subi,%A2,lo8(%o0+1))    CR_TAB
2605 -                                               AS2 (sbci,%B2,hi8(%o0+1))) 
2606 -                         : (AS2 (std,%A0,%A1)    CR_TAB
2607 -                           AS2 (std,%B0,%B1));
2608 -         }
2609 +      {
2610 +        op[2] = XEXP(base, 0);
2611 +        if(REGNO(op[2]) == REG_Y)
2612 +        {
2613 +          if (AVR_TINY) {
2614 +            *l = 6;
2615 +            return (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
2616 +                    AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
2617 +                    AS2 (st,Y+,%A1)            CR_TAB
2618 +                    AS2 (st,Y,%B1)             CR_TAB
2619 +                    AS2 (subi,%A2,lo8(%o0+1))  CR_TAB
2620 +                    AS2 (sbci,%B2,hi8(%o0+1)));
2621 +          } else {
2622 +            *l = 2;
2623 +            return (AS2 (std,%A0,%A1)    CR_TAB
2624 +                    AS2 (std,%B0,%B1));
2625 +          }
2626 +        }
2627 +        if(REGNO(op[2]) == REG_Z)
2628 +        {
2629 +          if (AVR_TINY) {
2630 +            *l = 6;
2631 +            return (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
2632 +                    AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
2633 +                    AS2 (st,Z+,%A1)            CR_TAB
2634 +                    AS2 (st,Z,%B1)             CR_TAB
2635 +                    AS2 (subi,%A2,lo8(%o0+1))  CR_TAB
2636 +                    AS2 (sbci,%B2,hi8(%o0+1)));
2637 +          } else {
2638 +            *l = 2;
2639 +            return (AS2 (std,%A0,%A1)    CR_TAB
2640 +                    AS2 (std,%B0,%B1));
2641 +          }
2642 +        }
2643 +      }
2644      }
2645    else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2646      {
2647 @@ -3814,33 +4274,41 @@ out_movhi_mr_r (rtx insn, rtx op[], int 
2648          {
2649            if (REGNO (XEXP (base, 0)) == REG_X)
2650              {
2651 -              *l = 4;
2652 -              return AVR_TINY ? (AS2 (subi,r26,lo8(-1))  CR_TAB
2653 -                                           AS2 (sbci,r27,hi8(-1))  CR_TAB
2654 +              if (AVR_TINY) {
2655 +                *l = 6;
2656 +                return (AS2 (subi,r26,lo8(-1))  CR_TAB
2657 +                        AS2 (sbci,r27,hi8(-1))  CR_TAB
2658                          AS2 (st,X,%B1)    CR_TAB
2659                          AS2 (st,-X,%A1)   CR_TAB
2660 -                                               AS2 (subi,r26,lo8(-2)) CR_TAB
2661 -                                               AS2 (sbci,r27,hi8(-2)))
2662 -                                        : (AS2 (adiw,r26,1)  CR_TAB
2663 -                      AS2 (st,X,%B1)    CR_TAB
2664 -                      AS2 (st,-X,%A1)   CR_TAB
2665 -                      AS2 (adiw,r26,2));
2666 +                        AS2 (subi,r26,lo8(-2)) CR_TAB
2667 +                        AS2 (sbci,r27,hi8(-2)));
2668 +              } else {
2669 +                *l = 4;
2670 +                return (AS2 (adiw,r26,1)  CR_TAB
2671 +                        AS2 (st,X,%B1)    CR_TAB
2672 +                        AS2 (st,-X,%A1)   CR_TAB
2673 +                        AS2 (adiw,r26,2));
2674 +              }
2675              }
2676            else
2677              {
2678 -                         //FIXME:check the code once again for AVR_TINY
2679 -              *l = 3;
2680 -              return AVR_TINY ? (AS2 (subi,%A0,lo8(-1)) CR_TAB
2681 -                                               AS2 (sbci,%B0,hi8(-1)) CR_TAB
2682 -                                               AS2 (st,%p0,%B1)       CR_TAB
2683 -                        AS2 (subi,%A0,lo8(1)) CR_TAB
2684 -                                               AS2 (sbci,%B0,hi8(1)) CR_TAB
2685 +              //FIXME:check the code once again for AVR_TINY
2686 +              if (AVR_TINY) {
2687 +                *l = 8;
2688 +                return (AS2 (subi,%A0,lo8(-1)) CR_TAB
2689 +                        AS2 (sbci,%B0,hi8(-1)) CR_TAB
2690 +                        AS2 (st,%p0,%B1)       CR_TAB
2691 +                        AS2 (subi,%A0,lo8(1))  CR_TAB
2692 +                        AS2 (sbci,%B0,hi8(1))  CR_TAB
2693                          AS2 (st,%p0,%A1)       CR_TAB
2694                          AS2 (subi,%A0,lo8(-3)) CR_TAB 
2695 -                                           AS2 (sbci,%B0,hi8(-3)))
2696 -                                        : (AS2 (std,%p0+1,%B1) CR_TAB
2697 -                      AS2 (st,%p0,%A1)    CR_TAB
2698 -                      AS2 (adiw,%r0,2));
2699 +                        AS2 (sbci,%B0,hi8(-3)));
2700 +              } else {
2701 +                *l = 3;
2702 +                return (AS2 (std,%p0+1,%B1) CR_TAB
2703 +                        AS2 (st,%p0,%A1)    CR_TAB
2704 +                        AS2 (adiw,%r0,2));
2705 +              }
2706              }
2707          }
2708  
2709 @@ -3927,10 +4395,14 @@ out_tsthi (rtx insn, rtx op, int *l)
2710      }
2711    if (test_hard_reg_class (ADDW_REGS, op))
2712      {
2713 -      if (l) *l = 1;
2714 -      return AVR_TINY ? (AS2 (subi,%A0,lo8(0)) CR_TAB
2715 -                               AS2 (sbci,%B0,hi8(0)))
2716 -                        :  AS2 (sbiw,%0,0);
2717 +      if (AVR_TINY) {
2718 +        if (l) *l = 2;
2719 +        return (AS2 (subi,%A0,lo8(0)) CR_TAB
2720 +                AS2 (sbci,%B0,hi8(0)));
2721 +      } else {
2722 +        if (l) *l = 1;
2723 +        return (AS2 (sbiw,%0,0));
2724 +      }
2725      }
2726    if (l) *l = 2;
2727    return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2728 @@ -3950,14 +4422,18 @@ out_tstsi (rtx insn, rtx op, int *l)
2729      }
2730    if (test_hard_reg_class (ADDW_REGS, op))
2731      {
2732 -      if (l) *l = 3;
2733 -      return AVR_TINY ? (AS2 (subi,%A0,lo8(-(-0))) CR_TAB
2734 -                               AS2 (sbci,%B0,hi8(-(-0))) CR_TAB
2735 +      if (AVR_TINY) {
2736 +        if (l) *l = 4;
2737 +        return (AS2 (subi,%A0,lo8(-(-0))) CR_TAB
2738 +                AS2 (sbci,%B0,hi8(-(-0))) CR_TAB
2739                  AS2 (cpc,%C0,__zero_reg__) CR_TAB
2740 -                AS2 (cpc,%D0,__zero_reg__))
2741 -                        : (AS2 (sbiw,%A0,0) CR_TAB
2742 -              AS2 (cpc,%C0,__zero_reg__) CR_TAB
2743 -              AS2 (cpc,%D0,__zero_reg__));
2744 +                AS2 (cpc,%D0,__zero_reg__));
2745 +      } else {
2746 +        if (l) *l = 3;
2747 +        return (AS2 (sbiw,%A0,0) CR_TAB
2748 +                AS2 (cpc,%C0,__zero_reg__) CR_TAB
2749 +                AS2 (cpc,%D0,__zero_reg__));
2750 +      }
2751      }
2752    if (l) *l = 4;
2753    return (AS2 (cp,%A0,__zero_reg__) CR_TAB