+diff -Naurp gcc/config/avr/avr.c gcc/config/avr/avr.c
+--- gcc/config/avr/avr.c 2011-09-02 11:45:05.000000000 +0300
++++ gcc/config/avr/avr.c 2011-09-02 11:46:03.000000000 +0300
+@@ -232,8 +232,8 @@ avr_override_options (void)
+ avr_current_arch = &avr_arch_types[avr_current_device->arch];
+ avr_extra_arch_macro = avr_current_device->macro;
+
+- tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
+- zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
++ tmp_reg_rtx = gen_rtx_REG (QImode, AVR_TINY ? TMP_REGNO_AVRTINY10 : TMP_REGNO);
++ zero_reg_rtx = gen_rtx_REG (QImode, AVR_TINY ? ZERO_REGNO_AVRTINY10 : ZERO_REGNO);
+
+ init_machine_status = avr_init_machine_status;
+ }
+@@ -1641,7 +1641,7 @@ avr_simplify_comparison_p (enum machine_
+ int
+ function_arg_regno_p(int r)
+ {
+- return (r >= 8 && r <= 25);
++ return (AVR_TINY ? r >= 20 && r <= 25 : r >= 8 && r <= 25);
+ }
+
+ /* Initializing the variable cum for the state at the beginning
+@@ -1651,7 +1651,11 @@ void
+ init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
+ tree fndecl ATTRIBUTE_UNUSED)
+ {
++ if (AVR_TINY)
++ cum->nregs = 6;
++ else
+ cum->nregs = 18;
++
+ cum->regno = FIRST_CUM_REG;
+ if (!libname && fntype)
+ {
+@@ -1675,9 +1679,8 @@ avr_num_arg_regs (enum machine_mode mode
+ else
+ size = GET_MODE_SIZE (mode);
+
+- /* Align all function arguments to start in even-numbered registers.
++ /* If not AVR_TINY, align all function arguments to start in even-numbered registers.
+ Odd-sized arguments leave holes above them. */
+-
+ return (size + 1) & ~1;
+ }
+
+@@ -2009,10 +2012,20 @@ out_movqi_r_mr (rtx insn, rtx op[], int
+ fatal_insn ("incorrect insn:",insn);
+
+ if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
+- return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
++ return *l = 3, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-63))) CR_TAB
++ AS2 (sbci,r29,hi8(-(%o1-63))) CR_TAB
++ AS2 (subi,r28,lo8(-63)) CR_TAB
++ AS2 (sbci,r29,hi8(-63)) CR_TAB
++ AS2 (ld,%0,Y) CR_TAB
++ AS2 (subi,r28,lo8(63)) CR_TAB
++ AS2 (sbci,r29,hi8(63)) CR_TAB
++ AS2 (subi,r28,lo8(%o1-63)) CR_TAB
++ AS2 (sbci,r29,hi8(%o1-63)))
++ : (AS2 (adiw,r28,%o1-63) CR_TAB
+ AS2 (ldd,%0,Y+63) CR_TAB
+ AS2 (sbiw,r28,%o1-63));
+
++
+ return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
+ AS2 (sbci,r29,hi8(-%o1)) CR_TAB
+ AS2 (ld,%0,Y) CR_TAB
+@@ -2025,15 +2038,38 @@ out_movqi_r_mr (rtx insn, rtx op[], int
+ it but I have this situation with extremal optimizing options. */
+ if (reg_overlap_mentioned_p (dest, XEXP (x,0))
+ || reg_unused_after (insn, XEXP (x,0)))
+- return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
++ return *l = 2, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
++ AS2 (ld,%0,X))
++ : (AS2 (adiw,r26,%o1) CR_TAB
+ AS2 (ld,%0,X));
+
+- return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
++ return *l = 3, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
++ AS2 (ld,%0,X) CR_TAB
++ AS2 (subi,r26,lo8(%o1)) CR_TAB
++ AS2 (sbci,r27,hi8(%o1)))
++ : (AS2 (adiw,r26,%o1) CR_TAB
+ AS2 (ld,%0,X) CR_TAB
+ AS2 (sbiw,r26,%o1));
+ }
++
+ *l = 1;
+- return AS2 (ldd,%0,%1);
++ op[2] = XEXP(x, 0);
++ if(REGNO(op[2]) == REG_Y)
++ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
++ AS2 (ld,%0,Y) CR_TAB
++ AS2 (subi,%A2,lo8(%o1)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1)))
++ : AS2 (ldd,%0,%1);
++ if(REGNO(op[2]) == REG_Z)
++ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
++ AS2 (ld,%0,Z) CR_TAB
++ AS2 (subi,%A2,lo8(%o1)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1)))
++ : AS2 (ldd,%0,%1);
+ }
+ *l = 1;
+ return AS2 (ld,%0,%1);
+@@ -2073,14 +2109,34 @@ out_movhi_r_mr (rtx insn, rtx op[], int
+ AS2 (ld,%B0,X));
+ }
+ *l = 3;
+- return (AS2 (ld,%A0,X+) CR_TAB
++ return AVR_TINY ? (AS2 (ld,%A0,X+) CR_TAB
++ AS2 (ld,%B0,X) CR_TAB
++ AS2 (subi,r26,lo8(1)) CR_TAB
++ AS2 (sbci,r27,hi8(1)))
++ : (AS2 (ld,%A0,X+) CR_TAB
+ AS2 (ld,%B0,X) CR_TAB
+ AS2 (sbiw,r26,1));
+ }
+ else /* (R) */
+ {
+ *l = 2;
+- return (AS2 (ld,%A0,%1) CR_TAB
++ if(reg_base == REG_Y)
++ return AVR_TINY ? (AS2 (ld,%A0,%1) CR_TAB
++ AS2 (subi,r28,lo8((-1))) CR_TAB
++ AS2 (sbci,r29,hi8((-1))) CR_TAB
++ AS2 (ld,%B0,%1) CR_TAB
++ AS2 (subi,r28,lo8(1)) CR_TAB
++ AS2 (sbci,r29,hi8(1)))
++ : (AS2 (ld,%A0,%1) CR_TAB
++ AS2 (ldd,%B0,%1+1));
++ if(reg_base == REG_Z)
++ return AVR_TINY ? (AS2 (ld,%A0,%1) CR_TAB
++ AS2 (subi,r30,lo8((-1))) CR_TAB
++ AS2 (sbci,r31,hi8((-1))) CR_TAB
++ AS2 (ld,%B0,%1) CR_TAB
++ AS2 (subi,r30,lo8(1)) CR_TAB
++ AS2 (sbci,r31,hi8(1)))
++ : (AS2 (ld,%A0,%1) CR_TAB
+ AS2 (ldd,%B0,%1+1));
+ }
+ }
+@@ -2095,12 +2151,30 @@ out_movhi_r_mr (rtx insn, rtx op[], int
+ fatal_insn ("incorrect insn:",insn);
+
+ if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
+- return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
++ return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-62))) CR_TAB
++ AS2 (sbci,r29,hi8(-(%o1-62))) CR_TAB
++ AS2 (subi,r28,lo8(-62)) CR_TAB
++ AS2 (sbci,r29,hi8(-62)) CR_TAB
++ AS2 (ld,%A0,Y+) CR_TAB
++ AS2 (ld,%B0,Y) CR_TAB
++ AS2 (subi,r28,lo8(63)) CR_TAB
++ AS2 (sbci,r29,hi8(63)) CR_TAB
++ AS2 (subi,r28,lo8(%o1-62)) CR_TAB
++ AS2 (sbci,r29,hi8(%o1-62)))
++ : (AS2 (adiw,r28,%o1-62) CR_TAB
+ AS2 (ldd,%A0,Y+62) CR_TAB
+ AS2 (ldd,%B0,Y+63) CR_TAB
+ AS2 (sbiw,r28,%o1-62));
+
+- return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
++ return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o1)) CR_TAB
++ AS2 (sbci,r29,hi8(-%o1)) CR_TAB
++ AS2 (ld,%A0,Y+) CR_TAB
++ AS2 (ld,%B0,Y) CR_TAB
++ AS2 (subi,r28,lo8(1)) CR_TAB
++ AS2 (sbci,r29,hi8(1)) CR_TAB
++ AS2 (subi,r28,lo8(%o1)) CR_TAB
++ AS2 (sbci,r29,hi8(%o1)))
++ : (AS2 (subi,r28,lo8(-%o1)) CR_TAB
+ AS2 (sbci,r29,hi8(-%o1)) CR_TAB
+ AS2 (ld,%A0,Y) CR_TAB
+ AS2 (ldd,%B0,Y+1) CR_TAB
+@@ -2115,12 +2189,23 @@ out_movhi_r_mr (rtx insn, rtx op[], int
+
+ *l = 4;
+ if (reg_base == reg_dest)
+- return (AS2 (adiw,r26,%o1) CR_TAB
++ return AVR_TINY ? (AS2 (subi,r26,lo8(-%o1)) CR_TAB
++ AS2 (sbci,r27,hi8(-%o1)) CR_TAB
++ AS2 (ld,__tmp_reg__,X+) CR_TAB
++ AS2 (ld,%B0,X) CR_TAB
++ AS2 (mov,%A0,__tmp_reg__))
++ : (AS2 (adiw,r26,%o1) CR_TAB
+ AS2 (ld,__tmp_reg__,X+) CR_TAB
+ AS2 (ld,%B0,X) CR_TAB
+ AS2 (mov,%A0,__tmp_reg__));
+
+- return (AS2 (adiw,r26,%o1) CR_TAB
++ return AVR_TINY ? (AS2 (subi,r26,lo8(-%o1)) CR_TAB
++ AS2 (sbci,r27,hi8(-%o1)) CR_TAB
++ AS2 (ld,%A0,X+) CR_TAB
++ AS2 (ld,%B0,X) CR_TAB
++ AS2 (subi,r26,lo8(%o1+1)) CR_TAB
++ AS2 (sbci,r27,hi8(%o1+1)))
++ : (AS2 (adiw,r26,%o1) CR_TAB
+ AS2 (ld,%A0,X+) CR_TAB
+ AS2 (ld,%B0,X) CR_TAB
+ AS2 (sbiw,r26,%o1+1));
+@@ -2129,14 +2214,54 @@ out_movhi_r_mr (rtx insn, rtx op[], int
+ if (reg_base == reg_dest)
+ {
+ *l = 3;
+- return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
++ op[2] = XEXP(base, 0);
++
++ if(REGNO(op[2]) == REG_Y)
++ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
++ AS2 (ld,__tmp_reg__,Y+) CR_TAB
++ AS2 (ld,%B0,Y) CR_TAB
++ AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1+1)) CR_TAB
++ AS2 (mov,%A0,__tmp_reg__))
++ : (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
++ AS2 (ldd,%B0,%B1) CR_TAB
++ AS2 (mov,%A0,__tmp_reg__));
++ if(REGNO(op[2]) == REG_Z)
++ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
++ AS2 (ld,__tmp_reg__,Z+) CR_TAB
++ AS2 (ld,%B0,Z) CR_TAB
++ AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1+1)) CR_TAB
++ AS2 (mov,%A0,__tmp_reg__))
++ : (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
+ AS2 (ldd,%B0,%B1) CR_TAB
+ AS2 (mov,%A0,__tmp_reg__));
+ }
+-
+ *l = 2;
+- return (AS2 (ldd,%A0,%A1) CR_TAB
++
++ op[2] = XEXP(base, 0);
++
++ if(REGNO(op[2]) == REG_Y)
++ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
++ AS2 (ld,%A0,Y+) CR_TAB
++ AS2 (ld,%B0,Y) CR_TAB
++ AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1+1)))
++ : (AS2 (ldd,%A0,%A1) CR_TAB
++ AS2 (ldd,%B0,%B1));
++ if(REGNO(op[2]) == REG_Z)
++ return AVR_TINY ? ( AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
++ AS2 (ld,%A0,Z+) CR_TAB
++ AS2 (ld,%B0,Z) CR_TAB
++ AS2 (subi,%A2,lo8(%o1+1)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1+1)))
++ : (AS2 (ldd,%A0,%A1) CR_TAB
+ AS2 (ldd,%B0,%B1));
++
+ }
+ else if (GET_CODE (base) == PRE_DEC) /* (--R) */
+ {
+@@ -2148,7 +2273,13 @@ out_movhi_r_mr (rtx insn, rtx op[], int
+ if (REGNO (XEXP (base, 0)) == REG_X)
+ {
+ *l = 4;
+- return (AS2 (sbiw,r26,2) CR_TAB
++ return AVR_TINY ? (AS2 (subi,r26,lo8(2)) CR_TAB
++ AS2 (sbci,r27,hi8(2)) CR_TAB
++ AS2 (ld,%A0,X+) CR_TAB
++ AS2 (ld,%B0,X) CR_TAB
++ AS2 (subi,r26,lo8(1)) CR_TAB
++ AS2 (sbci,r27,hi8(1)))
++ : (AS2 (sbiw,r26,2) CR_TAB
+ AS2 (ld,%A0,X+) CR_TAB
+ AS2 (ld,%B0,X) CR_TAB
+ AS2 (sbiw,r26,1));
+@@ -2156,7 +2287,16 @@ out_movhi_r_mr (rtx insn, rtx op[], int
+ else
+ {
+ *l = 3;
+- return (AS2 (sbiw,%r1,2) CR_TAB
++ //FIXME:check the code once again for AVR_TINY
++ return AVR_TINY ? (AS2 (subi,%A1,lo8(2)) CR_TAB
++ AS2 (sbci,%B1,hi8(2)) CR_TAB
++ AS2 (ld,%A0,%p1) CR_TAB
++ AS2 (subi,%A1,lo8(-1)) CR_TAB
++ AS2 (sbci,%B1,hi8(-1)) CR_TAB
++ AS2 (ld,%B0,%p1) CR_TAB
++ AS2 (subi,%A1,lo8(1)) CR_TAB
++ AS2 (sbci,%B1,hi8(1)))
++ : (AS2 (sbiw,%r1,2) CR_TAB
+ AS2 (ld,%A0,%p1) CR_TAB
+ AS2 (ldd,%B0,%p1+1));
+ }
+@@ -2212,13 +2352,23 @@ out_movsi_r_mr (rtx insn, rtx op[], int
+ {
+ if (reg_dest == REG_X)
+ /* "ld r26,-X" is undefined */
+- return *l=7, (AS2 (adiw,r26,3) CR_TAB
++ return *l=7, AVR_TINY ? (AS2 (subi,r26,lo8(-3)) CR_TAB
++ AS2 (sbci,r27,hi8(-3)) CR_TAB
++ AS2 (ld,r29,X) CR_TAB
++ AS2 (ld,r28,-X) CR_TAB
++ AS2 (ld,__tmp_reg__,-X) CR_TAB
++ AS2 (subi,r26,lo8(1)) CR_TAB
++ AS2 (sbci,r27,hi8(1)) CR_TAB
++ AS2 (ld,r26,X) CR_TAB
++ AS2 (mov,r27,__tmp_reg__))
++ : (AS2 (adiw,r26,3) CR_TAB
+ AS2 (ld,r29,X) CR_TAB
+ AS2 (ld,r28,-X) CR_TAB
+ AS2 (ld,__tmp_reg__,-X) CR_TAB
+ AS2 (sbiw,r26,1) CR_TAB
+ AS2 (ld,r26,X) CR_TAB
+ AS2 (mov,r27,__tmp_reg__));
++
+ else if (reg_dest == REG_X - 2)
+ return *l=5, (AS2 (ld,%A0,X+) CR_TAB
+ AS2 (ld,%B0,X+) CR_TAB
+@@ -2231,7 +2381,13 @@ out_movsi_r_mr (rtx insn, rtx op[], int
+ AS2 (ld,%C0,X+) CR_TAB
+ AS2 (ld,%D0,X));
+ else
+- return *l=5, (AS2 (ld,%A0,X+) CR_TAB
++ return *l=5, AVR_TINY ? (AS2 (ld,%A0,X+) CR_TAB
++ AS2 (ld,%B0,X+) CR_TAB
++ AS2 (ld,%C0,X+) CR_TAB
++ AS2 (ld,%D0,X) CR_TAB
++ AS2 (subi,r26,lo8(3)) CR_TAB
++ AS2 (sbci,r27,hi8(3)))
++ : (AS2 (ld,%A0,X+) CR_TAB
+ AS2 (ld,%B0,X+) CR_TAB
+ AS2 (ld,%C0,X+) CR_TAB
+ AS2 (ld,%D0,X) CR_TAB
+@@ -2240,22 +2396,97 @@ out_movsi_r_mr (rtx insn, rtx op[], int
+ else
+ {
+ if (reg_dest == reg_base)
+- return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
++ {
++ if(reg_base == REG_Y)
++ return *l=5, AVR_TINY ? (AS2 (subi,r28,lo8(-3)) CR_TAB
++ AS2 (sbci,r29,hi8(-3)) CR_TAB
++ AS2 (ld,%D0,Y) CR_TAB
++ AS2 (ld,%C0,-Y) CR_TAB
++ AS2 (subi,r28,lo8(1)) CR_TAB
++ AS2 (sbci,r29,hi8(1)) CR_TAB
++ AS2 (ld,__tmp_reg__,%1) CR_TAB
++ AS2 (subi,r28,lo8(1)) CR_TAB
++ AS2 (sbci,r29,hi8(1)) CR_TAB
++ AS2 (ld,%A0,%1) CR_TAB
++ AS2 (mov,%B0,__tmp_reg__))
++ : (AS2 (ldd,%D0,%1+3) CR_TAB
++ AS2 (ldd,%C0,%1+2) CR_TAB
++ AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
++ AS2 (ld,%A0,%1) CR_TAB
++ AS2 (mov,%B0,__tmp_reg__));
++ if(reg_base == REG_Z)
++ return *l=5, AVR_TINY ? (AS2 (subi,r30,lo8(-3)) CR_TAB
++ AS2 (sbci,r31,hi8(-3)) CR_TAB
++ AS2 (ld,%D0,Z) CR_TAB
++ AS2 (ld,%C0,-Z) CR_TAB
++ AS2 (subi,r30,lo8(1)) CR_TAB
++ AS2 (sbci,r31,hi8(1)) CR_TAB
++ AS2 (ld,__tmp_reg__,%1) CR_TAB
++ AS2 (subi,r30,lo8(1)) CR_TAB
++ AS2 (sbci,r31,hi8(1)) CR_TAB
++ AS2 (ld,%A0,%1) CR_TAB
++ AS2 (mov,%B0,__tmp_reg__))
++ : (AS2 (ldd,%D0,%1+3) CR_TAB
+ AS2 (ldd,%C0,%1+2) CR_TAB
+ AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
+ AS2 (ld,%A0,%1) CR_TAB
+ AS2 (mov,%B0,__tmp_reg__));
++ }
++
+ else if (reg_base == reg_dest + 2)
+- return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
++ {
++ if(reg_base == REG_Y)
++ return *l=5, AVR_TINY ? (AS2 (ld ,%A0,Y+) CR_TAB
++ AS2 (ld,%B0,Y+) CR_TAB
++ AS2 (ld,__tmp_reg__,Y+) CR_TAB
++ AS2 (ld,%D0,Y) CR_TAB
++ AS2 (subi,r28,lo8(3)) CR_TAB
++ AS2 (sbci,r29,hi8(3)) CR_TAB
++ AS2 (mov,%C0,__tmp_reg__))
++ : (AS2 (ld ,%A0,%1) CR_TAB
++ AS2 (ldd,%B0,%1+1) CR_TAB
++ AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
++ AS2 (ldd,%D0,%1+3) CR_TAB
++ AS2 (mov,%C0,__tmp_reg__));
++ if(reg_base == REG_Z)
++ return *l=5, AVR_TINY ? (AS2 (ld ,%A0,Z+) CR_TAB
++ AS2 (ld,%B0,Z+) CR_TAB
++ AS2 (ld,__tmp_reg__,Z+) CR_TAB
++ AS2 (ld,%D0,Z) CR_TAB
++ AS2 (subi,r30,lo8(3)) CR_TAB
++ AS2 (sbci,r31,hi8(3)) CR_TAB
++ AS2 (mov,%C0,__tmp_reg__))
++ : (AS2 (ld ,%A0,%1) CR_TAB
+ AS2 (ldd,%B0,%1+1) CR_TAB
+ AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
+ AS2 (ldd,%D0,%1+3) CR_TAB
+ AS2 (mov,%C0,__tmp_reg__));
++ }
+ else
+- return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
++ {
++ if(reg_base == REG_Y)
++ return *l=4, AVR_TINY ? (AS2 (ld ,%A0,Y+) CR_TAB
++ AS2 (ld,%B0,Y+) CR_TAB
++ AS2 (ld,%C0,Y+) CR_TAB
++ AS2 (ld,%D0,Y) CR_TAB
++ AS2 (subi,r28,lo8(3)) CR_TAB
++ AS2 (sbci,r29,hi8(3)))
++ : (AS2 (ld ,%A0,%1) CR_TAB
+ AS2 (ldd,%B0,%1+1) CR_TAB
+ AS2 (ldd,%C0,%1+2) CR_TAB
+ AS2 (ldd,%D0,%1+3));
++ if(reg_base == REG_Z)
++ return *l=4, AVR_TINY ? (AS2 (ld ,%A0,Z+) CR_TAB
++ AS2 (ld,%B0,Z+) CR_TAB
++ AS2 (ld,%C0,Z+) CR_TAB
++ AS2 (ld,%D0,Z) CR_TAB
++ AS2 (subi,r30,lo8(3)) CR_TAB
++ AS2 (sbci,r31,hi8(3)))
++ : (AS2 (ld ,%A0,%1) CR_TAB
++ AS2 (ldd,%B0,%1+1) CR_TAB
++ AS2 (ldd,%C0,%1+2) CR_TAB
++ AS2 (ldd,%D0,%1+3));
++ }
+ }
+ }
+ else if (GET_CODE (base) == PLUS) /* (R + i) */
+@@ -2268,14 +2499,36 @@ out_movsi_r_mr (rtx insn, rtx op[], int
+ fatal_insn ("incorrect insn:",insn);
+
+ if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
+- return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
++ return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o1-60))) CR_TAB
++ AS2 (sbci,r29,hi8(-(%o1-60))) CR_TAB
++ AS2 (subi,r28,lo8(-60)) CR_TAB
++ AS2 (sbci,r29,hi8(-60)) CR_TAB
++ AS2 (ld,%A0,Y+) CR_TAB
++ AS2 (ld,%B0,Y+) CR_TAB
++ AS2 (ld,%C0,Y+) CR_TAB
++ AS2 (ld,%D0,Y) CR_TAB
++ AS2 (subi,r28,lo8(63)) CR_TAB
++ AS2 (sbci,r29,hi8(63)) CR_TAB
++ AS2 (subi,r28,lo8(%o1-60)) CR_TAB
++ AS2 (sbci,r29,hi8(%o1-60)))
++ : (AS2 (adiw,r28,%o1-60) CR_TAB
+ AS2 (ldd,%A0,Y+60) CR_TAB
+ AS2 (ldd,%B0,Y+61) CR_TAB
+ AS2 (ldd,%C0,Y+62) CR_TAB
+ AS2 (ldd,%D0,Y+63) CR_TAB
+ AS2 (sbiw,r28,%o1-60));
+
+- return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
++ return *l = 8, AVR_TINY ? (AS2 (subi,r28,lo8(-%o1)) CR_TAB
++ AS2 (sbci,r29,hi8(-%o1)) CR_TAB
++ AS2 (ld,%A0,Y+) CR_TAB
++ AS2 (ld,%B0,Y+) CR_TAB
++ AS2 (ld,%C0,Y+) CR_TAB
++ AS2 (ld,%D0,Y) CR_TAB
++ AS2 (subi,r28,lo8(3)) CR_TAB
++ AS2 (sbci,r29,hi8(3)) CR_TAB
++ AS2 (subi,r28,lo8(%o1)) CR_TAB
++ AS2 (sbci,r29,hi8(%o1)))
++ : (AS2 (subi,r28,lo8(-%o1)) CR_TAB
+ AS2 (sbci,r29,hi8(-%o1)) CR_TAB
+ AS2 (ld,%A0,Y) CR_TAB
+ AS2 (ldd,%B0,Y+1) CR_TAB
+@@ -2293,7 +2546,16 @@ out_movsi_r_mr (rtx insn, rtx op[], int
+ {
+ *l = 7;
+ /* "ld r26,-X" is undefined */
+- return (AS2 (adiw,r26,%o1+3) CR_TAB
++ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1+3))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o1+3))) CR_TAB
++ AS2 (ld,r29,X) CR_TAB
++ AS2 (ld,r28,-X) CR_TAB
++ AS2 (ld,__tmp_reg__,-X) CR_TAB
++ AS2 (subi,r26,lo8(1)) CR_TAB
++ AS2 (sbci,r27,hi8(1)) CR_TAB
++ AS2 (ld,r26,X) CR_TAB
++ AS2 (mov,r27,__tmp_reg__))
++ : (AS2 (adiw,r26,%o1+3) CR_TAB
+ AS2 (ld,r29,X) CR_TAB
+ AS2 (ld,r28,-X) CR_TAB
+ AS2 (ld,__tmp_reg__,-X) CR_TAB
+@@ -2303,14 +2565,29 @@ out_movsi_r_mr (rtx insn, rtx op[], int
+ }
+ *l = 6;
+ if (reg_dest == REG_X - 2)
+- return (AS2 (adiw,r26,%o1) CR_TAB
++ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
++ AS2 (ld,r24,X+) CR_TAB
++ AS2 (ld,r25,X+) CR_TAB
++ AS2 (ld,__tmp_reg__,X+) CR_TAB
++ AS2 (ld,r27,X) CR_TAB
++ AS2 (mov,r26,__tmp_reg__))
++ : (AS2 (adiw,r26,%o1) CR_TAB
+ AS2 (ld,r24,X+) CR_TAB
+ AS2 (ld,r25,X+) CR_TAB
+ AS2 (ld,__tmp_reg__,X+) CR_TAB
+ AS2 (ld,r27,X) CR_TAB
+ AS2 (mov,r26,__tmp_reg__));
+
+- return (AS2 (adiw,r26,%o1) CR_TAB
++ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o1))) CR_TAB
++ AS2 (ld,%A0,X+) CR_TAB
++ AS2 (ld,%B0,X+) CR_TAB
++ AS2 (ld,%C0,X+) CR_TAB
++ AS2 (ld,%D0,X) CR_TAB
++ AS2 (subi,r26,lo8(%o1+3)) CR_TAB
++ AS2 (sbci,r27,hi8(%o1+3)))
++ : (AS2 (adiw,r26,%o1) CR_TAB
+ AS2 (ld,%A0,X+) CR_TAB
+ AS2 (ld,%B0,X+) CR_TAB
+ AS2 (ld,%C0,X+) CR_TAB
+@@ -2318,18 +2595,99 @@ out_movsi_r_mr (rtx insn, rtx op[], int
+ AS2 (sbiw,r26,%o1+3));
+ }
+ if (reg_dest == reg_base)
+- return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
++ {
++ op[2] = XEXP(base, 0);
++
++ if(REGNO(op[2]) == REG_Y)
++ return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1+4))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1+4))) CR_TAB
++ AS2 (ld,%D0,-Y) CR_TAB
++ AS2 (ld,%C0,-Y) CR_TAB
++ AS2 (ld,__tmp_reg__,-Y) CR_TAB
++ AS2 (ld,%A0,-Y) CR_TAB
++ AS2 (subi,%A2,lo8(%o1)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1)) CR_TAB
++ AS2 (mov,%B0,__tmp_reg__))
++ : (AS2 (ldd,%D0,%D1) CR_TAB
++ AS2 (ldd,%C0,%C1) CR_TAB
++ AS2 (ldd,__tmp_reg__,%B1) CR_TAB
++ AS2 (ldd,%A0,%A1) CR_TAB
++ AS2 (mov,%B0,__tmp_reg__));
++ if(REGNO(op[2]) == REG_Z)
++ return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1+4))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1+4))) CR_TAB
++ AS2 (ld,%D0,-Z) CR_TAB
++ AS2 (ld,%C0,-Z) CR_TAB
++ AS2 (ld,__tmp_reg__,-Z) CR_TAB
++ AS2 (ld,%A0,-Z) CR_TAB
++ AS2 (subi,%A2,lo8(%o1)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1)) CR_TAB
++ AS2 (mov,%B0,__tmp_reg__))
++ : (AS2 (ldd,%D0,%D1) CR_TAB
+ AS2 (ldd,%C0,%C1) CR_TAB
+ AS2 (ldd,__tmp_reg__,%B1) CR_TAB
+ AS2 (ldd,%A0,%A1) CR_TAB
+ AS2 (mov,%B0,__tmp_reg__));
++ }
+ else if (reg_dest == reg_base - 2)
+- return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
++ {
++ op[2] = XEXP(base, 0);
++
++ if(REGNO(op[2]) == REG_Y)
++ return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
++ AS2 (ld,%A0,Y+) CR_TAB
++ AS2 (ld,%B0,Y+) CR_TAB
++ AS2 (ld,__tmp_reg__,Y+) CR_TAB
++ AS2 (ld,%D0,Y) CR_TAB
++ AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1+3)) CR_TAB
++ AS2 (mov,%C0,__tmp_reg__))
++ : (AS2 (ldd,%A0,%A1) CR_TAB
+ AS2 (ldd,%B0,%B1) CR_TAB
+ AS2 (ldd,__tmp_reg__,%C1) CR_TAB
+ AS2 (ldd,%D0,%D1) CR_TAB
+ AS2 (mov,%C0,__tmp_reg__));
+- return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
++ if(REGNO(op[2]) == REG_Z)
++ return *l=5, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
++ AS2 (ld,%A0,Z+) CR_TAB
++ AS2 (ld,%B0,Z+) CR_TAB
++ AS2 (ld,__tmp_reg__,Z+) CR_TAB
++ AS2 (ld,%D0,Z) CR_TAB
++ AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1+3)) CR_TAB
++ AS2 (mov,%C0,__tmp_reg__))
++ : (AS2 (ldd,%A0,%A1) CR_TAB
++ AS2 (ldd,%B0,%B1) CR_TAB
++ AS2 (ldd,__tmp_reg__,%C1) CR_TAB
++ AS2 (ldd,%D0,%D1) CR_TAB
++ AS2 (mov,%C0,__tmp_reg__));
++ }
++ op[2] = XEXP(base, 0);
++ if(REGNO(op[2]) == REG_Y)
++ return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
++ AS2 (ld,%A0,Y+) CR_TAB
++ AS2 (ld,%B0,Y+) CR_TAB
++ AS2 (ld,%C0,Y+) CR_TAB
++ AS2 (ld,%D0,Y) CR_TAB
++ AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1+3)))
++ : (AS2 (ldd,%A0,%A1) CR_TAB
++ AS2 (ldd,%B0,%B1) CR_TAB
++ AS2 (ldd,%C0,%C1) CR_TAB
++ AS2 (ldd,%D0,%D1));
++ if(REGNO(op[2]) == REG_Z)
++ return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o1))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o1))) CR_TAB
++ AS2 (ld,%A0,Z+) CR_TAB
++ AS2 (ld,%B0,Z+) CR_TAB
++ AS2 (ld,%C0,Z+) CR_TAB
++ AS2 (ld,%D0,Z) CR_TAB
++ AS2 (subi,%A2,lo8(%o1+3)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o1+3)))
++ : (AS2 (ldd,%A0,%A1) CR_TAB
+ AS2 (ldd,%B0,%B1) CR_TAB
+ AS2 (ldd,%C0,%C1) CR_TAB
+ AS2 (ldd,%D0,%D1));
+@@ -2380,14 +2738,30 @@ out_movsi_mr_r (rtx insn, rtx op[], int
+ {
+ /* "st X+,r26" is undefined */
+ if (reg_unused_after (insn, base))
+- return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
++ return *l=6, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
++ AS2 (st,X,r26) CR_TAB
++ AS2 (subi,r26,lo8(-1)) CR_TAB
++ AS2 (sbci,r27,hi8(-1)) CR_TAB
++ AS2 (st,X+,__tmp_reg__) CR_TAB
++ AS2 (st,X+,r28) CR_TAB
++ AS2 (st,X,r29))
++ : (AS2 (mov,__tmp_reg__,r27) CR_TAB
+ AS2 (st,X,r26) CR_TAB
+ AS2 (adiw,r26,1) CR_TAB
+ AS2 (st,X+,__tmp_reg__) CR_TAB
+ AS2 (st,X+,r28) CR_TAB
+ AS2 (st,X,r29));
+ else
+- return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
++ return *l=7, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
++ AS2 (st,X,r26) CR_TAB
++ AS2 (subi,r26,lo8(-1)) CR_TAB
++ AS2 (sbci,r27,hi8(-1)) CR_TAB
++ AS2 (st,X+,__tmp_reg__) CR_TAB
++ AS2 (st,X+,r28) CR_TAB
++ AS2 (st,X,r29) CR_TAB
++ AS2 (subi,r26,lo8(3)) CR_TAB
++ AS2 (sbci,r27,hi8(3)))
++ : (AS2 (mov,__tmp_reg__,r27) CR_TAB
+ AS2 (st,X,r26) CR_TAB
+ AS2 (adiw,r26,1) CR_TAB
+ AS2 (st,X+,__tmp_reg__) CR_TAB
+@@ -2406,7 +2780,16 @@ out_movsi_mr_r (rtx insn, rtx op[], int
+ AS2 (st,%0,__tmp_reg__) CR_TAB
+ AS1 (clr,__zero_reg__));
+ else
+- return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
++ return *l=8, AVR_TINY ? (AS2 (mov,__zero_reg__,%C1) CR_TAB
++ AS2 (mov,__tmp_reg__,%D1) CR_TAB
++ AS2 (st,%0+,%A1) CR_TAB
++ AS2 (st,%0+,%B1) CR_TAB
++ AS2 (st,%0+,__zero_reg__) CR_TAB
++ AS2 (st,%0,__tmp_reg__) CR_TAB
++ AS1 (clr,__zero_reg__) CR_TAB
++ AS2 (subi,r26,lo8(3)) CR_TAB
++ AS2 (sbci,r27,hi8(3)))
++ : (AS2 (mov,__zero_reg__,%C1) CR_TAB
+ AS2 (mov,__tmp_reg__,%D1) CR_TAB
+ AS2 (st,%0+,%A1) CR_TAB
+ AS2 (st,%0+,%B1) CR_TAB
+@@ -2415,18 +2798,44 @@ out_movsi_mr_r (rtx insn, rtx op[], int
+ AS1 (clr,__zero_reg__) CR_TAB
+ AS2 (sbiw,r26,3));
+ }
+- return *l=5, (AS2 (st,%0+,%A1) CR_TAB
++ return *l=5, AVR_TINY ? (AS2 (st,%0+,%A1) CR_TAB
++ AS2 (st,%0+,%B1) CR_TAB
++ AS2 (st,%0+,%C1) CR_TAB
++ AS2 (st,%0,%D1) CR_TAB
++ AS2 (subi,r26,lo8(3)) CR_TAB
++ AS2 (sbci,r27,hi8(3)))
++ : (AS2 (st,%0+,%A1) CR_TAB
+ AS2 (st,%0+,%B1) CR_TAB
+ AS2 (st,%0+,%C1) CR_TAB
+ AS2 (st,%0,%D1) CR_TAB
+ AS2 (sbiw,r26,3));
+ }
+ else
+- return *l=4, (AS2 (st,%0,%A1) CR_TAB
++ {
++ if(reg_base == REG_Y)
++ return *l=4, AVR_TINY ? (AS2 (st,Y+,%A1) CR_TAB
++ AS2 (st,Y+,%B1) CR_TAB
++ AS2 (st,Y+,%C1) CR_TAB
++ AS2 (st,Y,%D1) CR_TAB
++ AS2 (subi,r28,lo8(3)) CR_TAB
++ AS2 (sbci,r29,hi8(3)))
++ : (AS2 (st,%0,%A1) CR_TAB
++ AS2 (std,%0+1,%B1) CR_TAB
++ AS2 (std,%0+2,%C1) CR_TAB
++ AS2 (std,%0+3,%D1));
++ if(reg_base == REG_Z)
++ return *l=4, AVR_TINY ? (AS2 (st,Z+,%A1) CR_TAB
++ AS2 (st,Z+,%B1) CR_TAB
++ AS2 (st,Z+,%C1) CR_TAB
++ AS2 (st,Z,%D1) CR_TAB
++ AS2 (subi,r30,lo8(3)) CR_TAB
++ AS2 (sbci,r31,hi8(3)))
++ : (AS2 (st,%0,%A1) CR_TAB
+ AS2 (std,%0+1,%B1) CR_TAB
+ AS2 (std,%0+2,%C1) CR_TAB
+ AS2 (std,%0+3,%D1));
+ }
++ }
+ else if (GET_CODE (base) == PLUS) /* (R + i) */
+ {
+ int disp = INTVAL (XEXP (base, 1));
+@@ -2437,14 +2846,35 @@ out_movsi_mr_r (rtx insn, rtx op[], int
+ fatal_insn ("incorrect insn:",insn);
+
+ if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
+- return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
++ return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-60))) CR_TAB
++ AS2 (sbci,r29,hi8(-(%o0-60))) CR_TAB
++ AS2 (subi,r28,lo8(-60)) CR_TAB
++ AS2 (sbci,r29,hi8(-60)) CR_TAB
++ AS2 (st,Y+,%A1) CR_TAB
++ AS2 (st,Y+,%B1) CR_TAB
++ AS2 (st,Y+,%C1) CR_TAB
++ AS2 (st,Y,%D1) CR_TAB
++ AS2 (subi,r28,lo8(63)) CR_TAB
++ AS2 (sbci,r29,hi8(63)) CR_TAB
++ AS2 (subi,r28,lo8(%o0-60)) CR_TAB
++ AS2 (sbci,r29,hi8(%o0-60)))
++ : (AS2 (adiw,r28,%o0-60) CR_TAB
+ AS2 (std,Y+60,%A1) CR_TAB
+ AS2 (std,Y+61,%B1) CR_TAB
+ AS2 (std,Y+62,%C1) CR_TAB
+ AS2 (std,Y+63,%D1) CR_TAB
+ AS2 (sbiw,r28,%o0-60));
+-
+- return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
++ return *l = 8, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB
++ AS2 (sbci,r29,hi8(-%o0)) CR_TAB
++ AS2 (st,Y+,%A1) CR_TAB
++ AS2 (st,Y+,%B1) CR_TAB
++ AS2 (st,Y+,%C1) CR_TAB
++ AS2 (st,Y,%D1) CR_TAB
++ AS2 (subi,r28,lo8(3)) CR_TAB
++ AS2 (sbci,r29,hi8(3)) CR_TAB
++ AS2 (subi,r28,lo8(%o0)) CR_TAB
++ AS2 (sbci,r29,hi8(%o0)))
++ : (AS2 (subi,r28,lo8(-%o0)) CR_TAB
+ AS2 (sbci,r29,hi8(-%o0)) CR_TAB
+ AS2 (st,Y,%A1) CR_TAB
+ AS2 (std,Y+1,%B1) CR_TAB
+@@ -2459,7 +2889,18 @@ out_movsi_mr_r (rtx insn, rtx op[], int
+ if (reg_src == REG_X)
+ {
+ *l = 9;
+- return (AS2 (mov,__tmp_reg__,r26) CR_TAB
++ return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB
++ AS2 (mov,__zero_reg__,r27) CR_TAB
++ AS2 (subi,r26,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
++ AS2 (st,X+,__tmp_reg__) CR_TAB
++ AS2 (st,X+,__zero_reg__) CR_TAB
++ AS2 (st,X+,r28) CR_TAB
++ AS2 (st,X,r29) CR_TAB
++ AS1 (clr,__zero_reg__) CR_TAB
++ AS2 (subi,r26,lo8(%o0+3)) CR_TAB
++ AS2 (sbci,r27,hi8(%o0+3)))
++ : (AS2 (mov,__tmp_reg__,r26) CR_TAB
+ AS2 (mov,__zero_reg__,r27) CR_TAB
+ AS2 (adiw,r26,%o0) CR_TAB
+ AS2 (st,X+,__tmp_reg__) CR_TAB
+@@ -2472,7 +2913,18 @@ out_movsi_mr_r (rtx insn, rtx op[], int
+ else if (reg_src == REG_X - 2)
+ {
+ *l = 9;
+- return (AS2 (mov,__tmp_reg__,r26) CR_TAB
++ return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB
++ AS2 (mov,__zero_reg__,r27) CR_TAB
++ AS2 (subi,r26,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
++ AS2 (st,X+,r24) CR_TAB
++ AS2 (st,X+,r25) CR_TAB
++ AS2 (st,X+,__tmp_reg__) CR_TAB
++ AS2 (st,X,__zero_reg__) CR_TAB
++ AS1 (clr,__zero_reg__) CR_TAB
++ AS2 (subi,r26,lo8(%o0+3)) CR_TAB
++ AS2 (sbci,r27,hi8(%o0+3)))
++ : (AS2 (mov,__tmp_reg__,r26) CR_TAB
+ AS2 (mov,__zero_reg__,r27) CR_TAB
+ AS2 (adiw,r26,%o0) CR_TAB
+ AS2 (st,X+,r24) CR_TAB
+@@ -2483,14 +2935,46 @@ out_movsi_mr_r (rtx insn, rtx op[], int
+ AS2 (sbiw,r26,%o0+3));
+ }
+ *l = 6;
+- return (AS2 (adiw,r26,%o0) CR_TAB
++ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
++ AS2 (st,X+,%A1) CR_TAB
++ AS2 (st,X+,%B1) CR_TAB
++ AS2 (st,X+,%C1) CR_TAB
++ AS2 (st,X,%D1) CR_TAB
++ AS2 (subi,r26,lo8(%o0+3)) CR_TAB
++ AS2 (sbci,r27,hi8(%o0+3)))
++ : (AS2 (adiw,r26,%o0) CR_TAB
+ AS2 (st,X+,%A1) CR_TAB
+ AS2 (st,X+,%B1) CR_TAB
+ AS2 (st,X+,%C1) CR_TAB
+ AS2 (st,X,%D1) CR_TAB
+ AS2 (sbiw,r26,%o0+3));
+ }
+- return *l=4, (AS2 (std,%A0,%A1) CR_TAB
++ op[2] = XEXP(base, 0);
++ if(REGNO(op[2]) == REG_Y)
++ return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
++ AS2 (st,Y+,%A1) CR_TAB
++ AS2 (st,Y+,%B1) CR_TAB
++ AS2 (st,Y+,%C1) CR_TAB
++ AS2 (st,Y,%D1) CR_TAB
++ AS2 (subi,%A2,lo8(%o0+3)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o0+3)))
++ : (AS2 (std,%A0,%A1) CR_TAB
++ AS2 (std,%B0,%B1) CR_TAB
++ AS2 (std,%C0,%C1) CR_TAB
++ AS2 (std,%D0,%D1));
++
++ if(REGNO(op[2]) == REG_Z)
++ return *l=4, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
++ AS2 (st,Z+,%A1) CR_TAB
++ AS2 (st,Z+,%B1) CR_TAB
++ AS2 (st,Z+,%C1) CR_TAB
++ AS2 (st,Z,%D1) CR_TAB
++ AS2 (subi,%A2,lo8(%o0+3)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o0+3)))
++ : (AS2 (std,%A0,%A1) CR_TAB
+ AS2 (std,%B0,%B1) CR_TAB
+ AS2 (std,%C0,%C1) CR_TAB
+ AS2 (std,%D0,%D1));
+@@ -2707,7 +3191,16 @@ out_movqi_mr_r (rtx insn, rtx op[], int
+ fatal_insn ("incorrect insn:",insn);
+
+ if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
+- return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
++ return *l = 3, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-63))) CR_TAB
++ AS2 (sbci,r29,hi8(-(%o0-63))) CR_TAB
++ AS2 (subi,r28,lo8(-63)) CR_TAB
++ AS2 (sbci,r29,hi8(-63)) CR_TAB
++ AS2 (st,Y,%1) CR_TAB
++ AS2 (subi,r28,lo8(63)) CR_TAB
++ AS2 (sbci,r29,hi8(63)) CR_TAB
++ AS2 (subi,r28,lo8(%o0-63)) CR_TAB
++ AS2 (sbci,r29,hi8(%o0-63)))
++ : (AS2 (adiw,r28,%o0-63) CR_TAB
+ AS2 (std,Y+63,%1) CR_TAB
+ AS2 (sbiw,r28,%o0-63));
+
+@@ -2722,11 +3215,21 @@ out_movqi_mr_r (rtx insn, rtx op[], int
+ if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
+ {
+ if (reg_unused_after (insn, XEXP (x,0)))
+- return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
++ return *l = 3, AVR_TINY ? (AS2 (mov,__tmp_reg__,%1) CR_TAB
++ AS2 (subi,r26,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
++ AS2 (st,X,__tmp_reg__))
++ : (AS2 (mov,__tmp_reg__,%1) CR_TAB
+ AS2 (adiw,r26,%o0) CR_TAB
+ AS2 (st,X,__tmp_reg__));
+
+- return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
++ return *l = 4, AVR_TINY ? (AS2 (mov,__tmp_reg__,%1) CR_TAB
++ AS2 (subi,r26,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
++ AS2 (st,X,__tmp_reg__) CR_TAB
++ AS2 (subi,r26,lo8(%o0)) CR_TAB
++ AS2 (sbci,r27,hi8(%o0)))
++ : (AS2 (mov,__tmp_reg__,%1) CR_TAB
+ AS2 (adiw,r26,%o0) CR_TAB
+ AS2 (st,X,__tmp_reg__) CR_TAB
+ AS2 (sbiw,r26,%o0));
+@@ -2734,16 +3237,38 @@ out_movqi_mr_r (rtx insn, rtx op[], int
+ else
+ {
+ if (reg_unused_after (insn, XEXP (x,0)))
+- return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
++ return *l = 2, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
++ AS2 (st,X,%1))
++ : (AS2 (adiw,r26,%o0) CR_TAB
+ AS2 (st,X,%1));
+
+- return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
++ return *l = 3, AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
++ AS2 (st,X,%1) CR_TAB
++ AS2 (subi,r26,lo8(%o0)) CR_TAB
++ AS2 (sbci,r27,hi8(%o0)))
++ : (AS2 (adiw,r26,%o0) CR_TAB
+ AS2 (st,X,%1) CR_TAB
+ AS2 (sbiw,r26,%o0));
+ }
+ }
+ *l = 1;
+- return AS2 (std,%0,%1);
++ op[2] = XEXP(x, 0);
++ if(REGNO(op[2]) == REG_Y)
++ return AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
++ AS2 (st,Y,%1) CR_TAB
++ AS2 (subi,%A2,lo8(%o0)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o0)))
++ : AS2 (std,%0,%1);
++ if(REGNO(op[2]) == REG_Z)
++ return AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
++ AS2 (st,Z,%1) CR_TAB
++ AS2 (subi,%A2,lo8(%o0)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o0)))
++ : AS2 (std,%0,%1);
+ }
+ *l = 1;
+ return AS2 (st,%0,%1);
+@@ -2792,20 +3317,39 @@ out_movhi_mr_r (rtx insn, rtx op[], int
+ {
+ /* "st X+,r26" and "st -X,r26" are undefined. */
+ if (!mem_volatile_p && reg_unused_after (insn, src))
+- return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
++ return *l=4, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
++ AS2 (st,X,r26) CR_TAB
++ AS2 (subi,r26,lo8(-1)) CR_TAB
++ AS2 (sbci,r27,hi8(-1)) CR_TAB
++ AS2 (st,X,__tmp_reg__))
++ : (AS2 (mov,__tmp_reg__,r27) CR_TAB
+ AS2 (st,X,r26) CR_TAB
+ AS2 (adiw,r26,1) CR_TAB
+ AS2 (st,X,__tmp_reg__));
+ else
+ {
+ if (!AVR_XMEGA)
+- return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
++ return *l=5, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
++ AS2 (subi,r26,lo8(-1)) CR_TAB
++ AS2 (sbci,r27,hi8(-1)) CR_TAB
++ AS2 (st,X,__tmp_reg__) CR_TAB
++ AS2 (subi,r26,lo8(1)) CR_TAB
++ AS2 (sbci,r27,hi8(1)) CR_TAB
++ AS2 (st,X,r26))
++ : (AS2 (mov,__tmp_reg__,r27) CR_TAB
+ AS2 (adiw,r26,1) CR_TAB
+ AS2 (st,X,__tmp_reg__) CR_TAB
+ AS2 (sbiw,r26,1) CR_TAB
+ AS2 (st,X,r26));
+ else
+- return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
++ return *l=5, AVR_TINY ? (AS2 (mov,__tmp_reg__,r27) CR_TAB
++ AS2 (st,X,r26) CR_TAB
++ AS2 (subi,r26,lo8(-1)) CR_TAB
++ AS2 (sbci,r27,hi8(-1)) CR_TAB
++ AS2 (st,X,__tmp_reg__) CR_TAB
++ AS2 (subi,r26,lo8(1)) CR_TAB
++ AS2 (sbci,r27,hi8(1)))
++ : (AS2 (mov,__tmp_reg__,r27) CR_TAB
+ AS2 (st,X,r26) CR_TAB
+ AS2 (adiw,r26,1) CR_TAB
+ AS2 (st,X,__tmp_reg__) CR_TAB
+@@ -2820,11 +3364,19 @@ out_movhi_mr_r (rtx insn, rtx op[], int
+ else
+ {
+ if (!AVR_XMEGA)
+- return *l=3, (AS2 (adiw,r26,1) CR_TAB
++ return *l=3, AVR_TINY ? (AS2 (subi,r26,lo8(-1)) CR_TAB
++ AS2 (sbci,r27,hi8(-1)) CR_TAB
++ AS2 (st,X,%B1) CR_TAB
++ AS2 (st,-X,%A1))
++ : (AS2 (adiw,r26,1) CR_TAB
+ AS2 (st,X,%B1) CR_TAB
+ AS2 (st,-X,%A1));
+ else
+- return *l=3, (AS2 (st,X+,%A1) CR_TAB
++ return *l=3, AVR_TINY ? (AS2 (st,X+,%A1) CR_TAB
++ AS2 (st,X,%B1) CR_TAB
++ AS2 (subi,r26,lo8(1)) CR_TAB
++ AS2 (sbci,r27,hi8(1)))
++ : (AS2 (st,X+,%A1) CR_TAB
+ AS2 (st,X,%B1) CR_TAB
+ AS2 (sbiw,r26,1));
+ }
+@@ -2833,13 +3385,41 @@ out_movhi_mr_r (rtx insn, rtx op[], int
+ else
+ {
+ if (!AVR_XMEGA)
+- return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
++ {
++ if(reg_base == REG_Y)
++ return *l=2, AVR_TINY ? (AS2 (subi,r28,lo8(-1)) CR_TAB
++ AS2 (sbci,r29,hi8(-1)) CR_TAB
++ AS2 (st,Y,%B1) CR_TAB
++ AS2 (st,-Y,%A1))
++ : (AS2 (std,%0+1,%B1) CR_TAB
++ AS2 (st,%0,%A1));
++ if(reg_base == REG_Z)
++ return *l=2, AVR_TINY ? (AS2 (subi,r30,lo8(-1)) CR_TAB
++ AS2 (sbci,r31,hi8(-1)) CR_TAB
++ AS2 (st,Z,%B1) CR_TAB
++ AS2 (st,-Z,%A1))
++ : (AS2 (std,%0+1,%B1) CR_TAB
+ AS2 (st,%0,%A1));
++ }
+ else
+- return *l=2, (AS2 (st,%0,%A1) CR_TAB
++ {
++ if(reg_base == REG_Y)
++ return *l=2, AVR_TINY ? (AS2 (st,Y+,%A1) CR_TAB
++ AS2 (st,Y,%B1) CR_TAB
++ AS2 (subi,r28,lo8(1)) CR_TAB
++ AS2 (sbci,r29,hi8(1)))
++ : (AS2 (st,%0,%A1) CR_TAB
++ AS2 (std,%0+1,%B1));
++ if(reg_base == REG_Z)
++ return *l=2, AVR_TINY ? (AS2 (st,Z+,%A1) CR_TAB
++ AS2 (st,Z,%B1) CR_TAB
++ AS2 (subi,r30,lo8(1)) CR_TAB
++ AS2 (sbci,r31,hi8(1)))
++ : (AS2 (st,%0,%A1) CR_TAB
+ AS2 (std,%0+1,%B1));
+ }
+ }
++ }
+ else if (GET_CODE (base) == PLUS)
+ {
+ int disp = INTVAL (XEXP (base, 1));
+@@ -2852,12 +3432,30 @@ out_movhi_mr_r (rtx insn, rtx op[], int
+ if (!AVR_XMEGA)
+ {
+ if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
+- return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
++ return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-62))) CR_TAB
++ AS2 (sbci,r29,hi8(-(%o0-62))) CR_TAB
++ AS2 (subi,r28,lo8(-63)) CR_TAB
++ AS2 (sbci,r29,hi8(-63)) CR_TAB
++ AS2 (st,Y,%B1) CR_TAB
++ AS2 (st,-Y,%A1) CR_TAB
++ AS2 (subi,r28,lo8(62)) CR_TAB
++ AS2 (sbci,r29,hi8(62)) CR_TAB
++ AS2 (subi,r28,lo8(%o0-62)) CR_TAB
++ AS2 (sbci,r29,hi8(%o0-62)))
++ : (AS2 (adiw,r28,%o0-62) CR_TAB
+ AS2 (std,Y+63,%B1) CR_TAB
+ AS2 (std,Y+62,%A1) CR_TAB
+ AS2 (sbiw,r28,%o0-62));
+
+- return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
++ return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB
++ AS2 (sbci,r29,hi8(-%o0)) CR_TAB
++ AS2 (subi,r28,lo8(-1)) CR_TAB
++ AS2 (sbci,r29,hi8(-1)) CR_TAB
++ AS2 (st,Y,%B1) CR_TAB
++ AS2 (st,-Y,%A1) CR_TAB
++ AS2 (subi,r28,lo8(%o0)) CR_TAB
++ AS2 (sbci,r29,hi8(%o0)))
++ : (AS2 (subi,r28,lo8(-%o0)) CR_TAB
+ AS2 (sbci,r29,hi8(-%o0)) CR_TAB
+ AS2 (std,Y+1,%B1) CR_TAB
+ AS2 (st,Y,%A1) CR_TAB
+@@ -2867,12 +3465,30 @@ out_movhi_mr_r (rtx insn, rtx op[], int
+ else
+ {
+ if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
+- return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
++ return *l = 4, AVR_TINY ? (AS2 (subi,r28,lo8(-(%o0-62))) CR_TAB
++ AS2 (sbci,r29,hi8(-(%o0-62))) CR_TAB
++ AS2 (subi,r28,lo8(-62)) CR_TAB
++ AS2 (sbci,r29,hi8(-62)) CR_TAB
++ AS2 (st,Y+,%A1) CR_TAB
++ AS2 (st,Y,%B1) CR_TAB
++ AS2 (subi,r28,lo8(63)) CR_TAB
++ AS2 (sbci,r29,hi8(63)) CR_TAB
++ AS2 (subi,r28,lo8(%o0-62)) CR_TAB
++ AS2 (sbci,r29,hi8(%o0-62)))
++ : (AS2 (adiw,r28,%o0-62) CR_TAB
+ AS2 (std,Y+62,%A1) CR_TAB
+ AS2 (std,Y+63,%B1) CR_TAB
+ AS2 (sbiw,r28,%o0-62));
+
+- return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
++ return *l = 6, AVR_TINY ? (AS2 (subi,r28,lo8(-%o0)) CR_TAB
++ AS2 (sbci,r29,hi8(-%o0)) CR_TAB
++ AS2 (st,Y+,%A1) CR_TAB
++ AS2 (st,Y,%B1) CR_TAB
++ AS2 (subi,r28,lo8(1)) CR_TAB
++ AS2 (sbci,r29,hi8(1)) CR_TAB
++ AS2 (subi,r28,lo8(%o0)) CR_TAB
++ AS2 (sbci,r29,hi8(%o0)))
++ : (AS2 (subi,r28,lo8(-%o0)) CR_TAB
+ AS2 (sbci,r29,hi8(-%o0)) CR_TAB
+ AS2 (st,Y,%A1) CR_TAB
+ AS2 (std,Y+1,%B1) CR_TAB
+@@ -2888,7 +3504,16 @@ out_movhi_mr_r (rtx insn, rtx op[], int
+ if (!AVR_XMEGA)
+ {
+ *l = 7;
+- return (AS2 (mov,__tmp_reg__,r26) CR_TAB
++ return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB
++ AS2 (mov,__zero_reg__,r27) CR_TAB
++ AS2 (subi,r26,lo8(-(%o0+1))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o0+1))) CR_TAB
++ AS2 (st,X,__zero_reg__) CR_TAB
++ AS2 (st,-X,__tmp_reg__) CR_TAB
++ AS1 (clr,__zero_reg__) CR_TAB
++ AS2 (subi,r26,lo8(%o0)) CR_TAB
++ AS2 (sbci,r27,hi8(%o0)))
++ : (AS2 (mov,__tmp_reg__,r26) CR_TAB
+ AS2 (mov,__zero_reg__,r27) CR_TAB
+ AS2 (adiw,r26,%o0+1) CR_TAB
+ AS2 (st,X,__zero_reg__) CR_TAB
+@@ -2899,19 +3524,35 @@ out_movhi_mr_r (rtx insn, rtx op[], int
+ else
+ {
+ *l = 7;
+- return (AS2 (mov,__tmp_reg__,r26) CR_TAB
++ return AVR_TINY ? (AS2 (mov,__tmp_reg__,r26) CR_TAB
+ AS2 (mov,__zero_reg__,r27) CR_TAB
+- AS2 (adiw,r26,%o0) CR_TAB
++ AS2 (subi,r26,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
+ AS2 (st,X+,__tmp_reg__) CR_TAB
+ AS2 (st,X,__zero_reg__) CR_TAB
+ AS1 (clr,__zero_reg__) CR_TAB
+- AS2 (sbiw,r26,%o0+1));
++ AS2 (subi,r26,lo8(%o0+1)) CR_TAB
++ AS2 (sbci,r27,hi8(%o0+1)))
++ : (AS2 (mov,__tmp_reg__,r26) CR_TAB
++ AS2 (mov,__zero_reg__,r27) CR_TAB
++ AS2 (adiw,r26,%o0+1) CR_TAB
++ AS2 (st,X+,__tmp_reg__) CR_TAB
++ AS2 (st,X,__zero_reg__) CR_TAB
++ AS1 (clr,__zero_reg__) CR_TAB
++ AS2 (sbiw,r26,%o0));
++
+ }
+ }
+ if (!AVR_XMEGA)
+ {
+ *l = 4;
+- return (AS2 (adiw,r26,%o0+1) CR_TAB
++ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0+1))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o0+1))) CR_TAB
++ AS2 (st,X,%B1) CR_TAB
++ AS2 (st,-X,%A1) CR_TAB
++ AS2 (subi,r26,lo8(%o0)) CR_TAB
++ AS2 (sbci,r27,hi8(%o0)))
++ : (AS2 (adiw,r26,%o0+1) CR_TAB
+ AS2 (st,X,%B1) CR_TAB
+ AS2 (st,-X,%A1) CR_TAB
+ AS2 (sbiw,r26,%o0));
+@@ -2919,7 +3560,13 @@ out_movhi_mr_r (rtx insn, rtx op[], int
+ else
+ {
+ *l = 4;
+- return (AS2 (adiw,r26,%o0) CR_TAB
++ return AVR_TINY ? (AS2 (subi,r26,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,r27,hi8(-(%o0))) CR_TAB
++ AS2 (st,X+,%A1) CR_TAB
++ AS2 (st,X,%B1) CR_TAB
++ AS2 (subi,r26,lo8(%o0)) CR_TAB
++ AS2 (sbci,r27,hi8(%o0)))
++ : (AS2 (adiw,r26,%o0) CR_TAB
+ AS2 (st,X+,%A1) CR_TAB
+ AS2 (st,X,%B1) CR_TAB
+ AS2 (sbiw,r26,%o0+1));
+@@ -2927,11 +3574,49 @@ out_movhi_mr_r (rtx insn, rtx op[], int
+ }
+
+ if (!AVR_XMEGA)
+- return *l=2, (AS2 (std,%B0,%B1) CR_TAB
++ {
++ op[2] = XEXP(base, 0);
++ if(REGNO(op[2]) == REG_Y)
++ return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0+2))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o0+2))) CR_TAB
++ AS2 (st,-Y,%B1) CR_TAB
++ AS2 (st,-Y,%A1) CR_TAB
++ AS2 (subi,%A2,lo8(%o0)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o0)))
++ : (AS2 (std,%B0,%B1) CR_TAB
+ AS2 (std,%A0,%A1));
++ if(REGNO(op[2]) == REG_Z)
++ return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0+1))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o0+1))) CR_TAB
++ AS2 (st,-Z,%B1) CR_TAB
++ AS2 (st,-Z,%A1) CR_TAB
++ AS2 (subi,%A2,lo8(%o0)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o0)))
++ : (AS2 (std,%B0,%B1) CR_TAB
++ AS2 (std,%A0,%A1));
++ }
+ else
+- return *l=2, (AS2 (std,%A0,%A1) CR_TAB
++ {
++ op[2] = XEXP(base, 0);
++ if(REGNO(op[2]) == REG_Y)
++ return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
++ AS2 (st,Y+,%A1) CR_TAB
++ AS2 (st,Y,%B1) CR_TAB
++ AS2 (subi,%A2,lo8(%o0+1)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o0+1)))
++ : (AS2 (std,%A0,%A1) CR_TAB
+ AS2 (std,%B0,%B1));
++ if(REGNO(op[2]) == REG_Z)
++ return *l=2, AVR_TINY ? (AS2 (subi,%A2,lo8(-(%o0))) CR_TAB
++ AS2 (sbci,%B2,hi8(-(%o0))) CR_TAB
++ AS2 (st,Z+,%A1) CR_TAB
++ AS2 (st,Z,%B1) CR_TAB
++ AS2 (subi,%A2,lo8(%o0+1)) CR_TAB
++ AS2 (sbci,%B2,hi8(%o0+1)))
++ : (AS2 (std,%A0,%A1) CR_TAB
++ AS2 (std,%B0,%B1));
++ }
+ }
+ else if (GET_CODE (base) == PRE_DEC) /* (--R) */
+ {
+@@ -2951,15 +3636,30 @@ out_movhi_mr_r (rtx insn, rtx op[], int
+ if (REGNO (XEXP (base, 0)) == REG_X)
+ {
+ *l = 4;
+- return (AS2 (adiw,r26,1) CR_TAB
++ return AVR_TINY ? (AS2 (subi,r26,lo8(-1)) CR_TAB
++ AS2 (sbci,r27,hi8(-1)) CR_TAB
++ AS2 (st,X,%B1) CR_TAB
++ AS2 (st,-X,%A1) CR_TAB
++ AS2 (subi,r26,lo8(-2)) CR_TAB
++ AS2 (sbci,r27,hi8(-2)))
++ : (AS2 (adiw,r26,1) CR_TAB
+ AS2 (st,X,%B1) CR_TAB
+ AS2 (st,-X,%A1) CR_TAB
+ AS2 (adiw,r26,2));
+ }
+ else
+ {
++ /* FIXME: check the code once again for AVR_TINY.  */
+ *l = 3;
+- return (AS2 (std,%p0+1,%B1) CR_TAB
++ return AVR_TINY ? (AS2 (subi,%A0,lo8(-1)) CR_TAB
++ AS2 (sbci,%B0,hi8(-1)) CR_TAB
++ AS2 (st,%p0,%B1) CR_TAB
++ AS2 (subi,%A0,lo8(1)) CR_TAB
++ AS2 (sbci,%B0,hi8(1)) CR_TAB
++ AS2 (st,%p0,%A1) CR_TAB
++ AS2 (subi,%A0,lo8(-3)) CR_TAB
++ AS2 (sbci,%B0,hi8(-3)))
++ : (AS2 (std,%p0+1,%B1) CR_TAB
+ AS2 (st,%p0,%A1) CR_TAB
+ AS2 (adiw,%r0,2));
+ }
+@@ -3049,7 +3749,9 @@ out_tsthi (rtx insn, rtx op, int *l)
+ if (test_hard_reg_class (ADDW_REGS, op))
+ {
+ if (l) *l = 1;
+- return AS2 (sbiw,%0,0);
++ return AVR_TINY ? (AS2 (subi,%A0,lo8(0)) CR_TAB
++ AS2 (sbci,%B0,hi8(0)))
++ : AS2 (sbiw,%0,0);
+ }
+ if (l) *l = 2;
+ return (AS2 (cp,%A0,__zero_reg__) CR_TAB
+@@ -3070,7 +3772,11 @@ out_tstsi (rtx insn, rtx op, int *l)
+ if (test_hard_reg_class (ADDW_REGS, op))
+ {
+ if (l) *l = 3;
+- return (AS2 (sbiw,%A0,0) CR_TAB
++ return AVR_TINY ? (AS2 (subi,%A0,lo8(-(-0))) CR_TAB
++ AS2 (sbci,%B0,hi8(-(-0))) CR_TAB
++ AS2 (cpc,%C0,__zero_reg__) CR_TAB
++ AS2 (cpc,%D0,__zero_reg__))
++ : (AS2 (sbiw,%A0,0) CR_TAB
+ AS2 (cpc,%C0,__zero_reg__) CR_TAB
+ AS2 (cpc,%D0,__zero_reg__));
+ }
+@@ -5392,10 +6098,12 @@ avr_file_start (void)
+ /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
+ fputs ("__SREG__ = 0x3f\n"
+ "__SP_H__ = 0x3e\n"
+- "__SP_L__ = 0x3d\n"
+- "__CCP__ = 0x34\n", asm_out_file);
++ "__SP_L__ = 0x3d\n", asm_out_file);
++
++ AVR_TINY ? fputs ("__CCP__ = 0x3c\n", asm_out_file) : fputs ("__CCP__ = 0x34\n", asm_out_file);
+
+- fputs ("__tmp_reg__ = 0\n"
++ AVR_TINY ? fputs ("__tmp_reg__ = 16\n"
++ "__zero_reg__ = 17\n", asm_out_file) : fputs ("__tmp_reg__ = 0\n"
+ "__zero_reg__ = 1\n", asm_out_file);
+
+ /* FIXME: output these only if there is anything in the .data / .bss
+diff -Naurp gcc/config/avr/avr-c.c gcc/config/avr/avr-c.c
+--- gcc/config/avr/avr-c.c 2011-09-02 11:45:05.000000000 +0300
++++ gcc/config/avr/avr-c.c 2011-09-02 11:46:03.000000000 +0300
+@@ -94,5 +94,9 @@ avr_cpu_cpp_builtins (struct cpp_reader
+ cpp_define (pfile, "__AVR_HAVE_RAMPD__");
+ }
+
++ if (avr_current_arch->avrtiny)
++ {
++ cpp_define (pfile, "__AVR_TINY__");
++ }
+ }
+
+diff -Naurp gcc/config/avr/avr-devices.c gcc/config/avr/avr-devices.c
+--- gcc/config/avr/avr-devices.c 2011-09-02 11:45:05.000000000 +0300
++++ gcc/config/avr/avr-devices.c 2011-09-02 11:46:03.000000000 +0300
+@@ -26,24 +26,25 @@
+ /* List of all known AVR MCU architectyres. */
+
+ const struct base_arch_s avr_arch_types[] = {
+- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, NULL, "avr2" }, /* unknown device specified */
+- { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=1", "avr1" },
+- { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=2", "avr2" },
+- { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=25", "avr25" },
+- { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=3", "avr3" },
+- { 0, 0, 1, 0, 1, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=31", "avr31" },
+- { 0, 0, 1, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=35", "avr35" },
+- { 0, 1, 0, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=4", "avr4" },
+- { 0, 1, 1, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=5", "avr5" },
+- { 0, 1, 1, 1, 1, 1, 0, 0, 0, 0x0060, "__AVR_ARCH__=51", "avr51" },
+- { 0, 1, 1, 1, 1, 1, 1, 0, 0, 0x0060, "__AVR_ARCH__=6", "avr6" },
+- { 0, 1, 0, 1, 0, 0, 0, 1, 0, 0x2000, "__AVR_ARCH__=101", "avrxmega1" },
+- { 0, 1, 1, 1, 0, 0, 0, 1, 0, 0x2000, "__AVR_ARCH__=102", "avrxmega2" },
+- { 0, 1, 1, 1, 0, 0, 0, 1, 1, 0x2000, "__AVR_ARCH__=103", "avrxmega3" },
+- { 0, 1, 1, 1, 1, 1, 0, 1, 0, 0x2000, "__AVR_ARCH__=104", "avrxmega4" },
+- { 0, 1, 1, 1, 1, 1, 0, 1, 1, 0x2000, "__AVR_ARCH__=105", "avrxmega5" },
+- { 0, 1, 1, 1, 1, 1, 1, 1, 0, 0x2000, "__AVR_ARCH__=106", "avrxmega6" },
+- { 0, 1, 1, 1, 1, 1, 1, 1, 1, 0x2000, "__AVR_ARCH__=107", "avrxmega7" }
++ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, NULL, "avr2" }, /* unknown device specified */
++ { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=1", "avr1" },
++ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=2", "avr2" },
++ { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=25", "avr25" },
++ { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=3", "avr3" },
++ { 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=31", "avr31" },
++ { 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=35", "avr35" },
++ { 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=4", "avr4" },
++ { 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=5", "avr5" },
++ { 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0x0060, "__AVR_ARCH__=51", "avr51" },
++ { 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0x0060, "__AVR_ARCH__=6", "avr6" },
++ { 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0x2000, "__AVR_ARCH__=101", "avrxmega1" },
++ { 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0x2000, "__AVR_ARCH__=102", "avrxmega2" },
++ { 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0x2000, "__AVR_ARCH__=103", "avrxmega3" },
++ { 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0x2000, "__AVR_ARCH__=104", "avrxmega4" },
++ { 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0x2000, "__AVR_ARCH__=105", "avrxmega5" },
++ { 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0x2000, "__AVR_ARCH__=106", "avrxmega6" },
++ { 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0x2000, "__AVR_ARCH__=107", "avrxmega7" },
++ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0x0040, "__AVR_ARCH__=201", "avrtiny10" }
+ };
+
+ /* List of all known AVR MCU types - if updated, it has to be kept
+@@ -228,6 +229,14 @@ const struct mcu_type_s avr_mcu_types[]
+ { "avrxmega7", ARCH_AVRXMEGA7, NULL, 0, 0x2000, "x128a1" },
+ { "atxmega128a1", ARCH_AVRXMEGA7, "__AVR_ATxmega128A1__", 0, 0x2000, "x128a1" },
+ { "atxmega128a1u", ARCH_AVRXMEGA7, "__AVR_ATxmega128A1U__", 0, 0x2000, "x128a1u" },
++ /* tiny10 family */
++ { "avrtiny10", ARCH_AVRTINY10, NULL, 0, 0x0040, "tn10" },
++ { "attiny4", ARCH_AVRTINY10, "__AVR_ATtiny4__", 0, 0x0040, "tn4" },
++ { "attiny5", ARCH_AVRTINY10, "__AVR_ATtiny5__", 0, 0x0040, "tn5" },
++ { "attiny9", ARCH_AVRTINY10, "__AVR_ATtiny9__", 0, 0x0040, "tn9" },
++ { "attiny10", ARCH_AVRTINY10, "__AVR_ATtiny10__", 0, 0x0040, "tn10" },
++ { "attiny20", ARCH_AVRTINY10, "__AVR_ATtiny20__", 0, 0x0040, "tn20" },
++ { "attiny40", ARCH_AVRTINY10, "__AVR_ATtiny40__", 0, 0x0040, "tn40" },
+ /* Assembler only. */
+ { "avr1", ARCH_AVR1, NULL, 0, 0x0060, "s1200" },
+ { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__", 0, 0x0060, "s1200" },
+diff -Naurp gcc/config/avr/avr.h gcc/config/avr/avr.h
+--- gcc/config/avr/avr.h 2011-09-02 11:45:05.000000000 +0300
++++ gcc/config/avr/avr.h 2011-09-02 11:46:03.000000000 +0300
+@@ -51,6 +51,9 @@ struct base_arch_s {
+ /* Core have RAMPX, RAMPY and RAMPD registers. */
+ int have_rampx_y_d;
+
++ /* Core is in avrtiny10 family. */
++ int avrtiny;
++
+ /* Default start of data section address for architecture. */
+ int default_data_section_start;
+
+@@ -82,7 +85,8 @@ enum avr_arch
+ ARCH_AVRXMEGA4,
+ ARCH_AVRXMEGA5,
+ ARCH_AVRXMEGA6,
+- ARCH_AVRXMEGA7
++ ARCH_AVRXMEGA7,
++ ARCH_AVRTINY10
+ };
+
+ struct mcu_type_s {
+@@ -126,6 +130,7 @@ extern GTY(()) section *progmem_section;
+ #define AVR_HAVE_EIJMP_EICALL (avr_current_arch->have_eijmp_eicall)
+ #define AVR_HAVE_8BIT_SP (avr_current_device->short_sp || TARGET_TINY_STACK)
+ #define AVR_XMEGA (avr_current_arch->xmega)
++#define AVR_TINY (avr_current_arch->avrtiny)
+ #define AVR_HAVE_RAMPX_Y_D (avr_current_arch->have_rampx_y_d)
+
+ #define AVR_2_BYTE_PC (!AVR_HAVE_EIJMP_EICALL)
+@@ -249,7 +254,6 @@ extern GTY(()) section *progmem_section;
+
+ #define ORDER_REGS_FOR_LOCAL_ALLOC order_regs_for_local_alloc ()
+
+-
+ #define HARD_REGNO_NREGS(REGNO, MODE) ((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
+
+ #define HARD_REGNO_MODE_OK(REGNO, MODE) avr_hard_regno_mode_ok(REGNO, MODE)
+@@ -313,6 +317,41 @@ enum reg_class {
+ {0xffffffff,0x00000003} /* ALL_REGS */ \
+ }
+
++/* Zero or more C statements that may conditionally modify five variables
++ fixed_regs, call_used_regs, global_regs, reg_names, and reg_class_contents,
++ to take into account any dependence of these register sets on target flags.
++ The first three of these are of type char [] (interpreted as Boolean
++ vectors). global_regs is a const char *[], and reg_class_contents is a
++ HARD_REG_SET. Before the macro is called, fixed_regs, call_used_regs,
++ reg_class_contents, and reg_names have been initialized from
++ FIXED_REGISTERS, CALL_USED_REGISTERS, REG_CLASS_CONTENTS, and
++ REGISTER_NAMES, respectively. global_regs has been cleared, and any
++ '-ffixed-reg', '-fcall-used-reg' and '-fcall-saved-reg' command options
++ have been applied.
++
++ You need not define this macro if it has no work to do.
++
++ If the usage of an entire class of registers depends on the target flags,
++ you may indicate this to GCC by using this macro to modify fixed_regs and
++ call_used_regs to 1 for each of the registers in the classes which should
++ not be used by GCC. Also define the macro REG_CLASS_FROM_LETTER /
++ REG_CLASS_FROM_CONSTRAINT to return NO_REGS if it is called with a letter
++ for a class that shouldn\92t be used. (However, if this class is not included
++ in GENERAL_REGS and all of the insn patterns whose constraints permit this
++ class are controlled by target switches, then GCC will automatically avoid
++ using these registers when the target switches are opposed to them.) */
++
++#define CONDITIONAL_REGISTER_USAGE \
++ if (AVR_TINY) { \
++ int i; \
++ for (i = 0; i <= 17; i++) { \
++ fixed_regs[i] = 1; \
++ call_used_regs[i] = 1; \
++ } \
++ CLEAR_HARD_REG_SET(reg_class_contents[(int)ADDW_REGS]); \
++ CLEAR_HARD_REG_SET(reg_class_contents[(int)NO_LD_REGS]); \
++ }
++
+ #define REGNO_REG_CLASS(R) avr_regno_reg_class(R)
+
+ /* The following macro defines cover classes for Integrated Register
+diff -Naurp gcc/config/avr/avr.md gcc/config/avr/avr.md
+--- gcc/config/avr/avr.md 2011-09-02 11:45:05.000000000 +0300
++++ gcc/config/avr/avr.md 2011-09-02 11:46:03.000000000 +0300
+@@ -186,6 +186,9 @@
+ DONE;
+ })
+
++(define_constants
++ [(TMP_REGNO_AVRTINY10 16) ; temporary register r16
++ (ZERO_REGNO_AVRTINY10 17)]) ; zero register r17
+
+ (define_insn "*push<ALLQ:mode>"
+ [(set (mem:ALLQ (post_dec (reg:HI REG_SP)))
+@@ -479,7 +482,7 @@
+ rtx addr1 = copy_to_mode_reg (Pmode, XEXP (operands[1], 0));
+
+ /* Create rtx for tmp register - we use this as scratch. */
+- rtx tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
++ rtx tmp_reg_rtx = gen_rtx_REG (QImode, AVR_TINY ? TMP_REGNO_AVRTINY10 : TMP_REGNO);
+
+ if (GET_CODE (operands[2]) != CONST_INT)
+ FAIL;
+@@ -2900,7 +2903,7 @@
+ UNSPEC_INDEX_JMP))
+ (use (label_ref (match_operand 1 "" "")))
+ (clobber (match_dup 0))]
+- "AVR_HAVE_JMP_CALL && !AVR_HAVE_EIJMP_EICALL"
++ "(AVR_HAVE_JMP_CALL && !AVR_HAVE_EIJMP_EICALL)"
+ "lsl r30
+ rol r31
+ lpm
+diff -Naurp gcc/config/avr/libgcc-fixed.S gcc/config/avr/libgcc-fixed.S
+--- gcc/config/avr/libgcc-fixed.S 2011-09-02 11:40:55.000000000 +0300
++++ gcc/config/avr/libgcc-fixed.S 2011-09-02 11:46:03.000000000 +0300
+@@ -29,13 +29,17 @@ Boston, MA 02110-1301, USA. */
+
+ /* Fixed point library routines for avr. */
+
++#if defined (__AVR_TINY__)
++#define __zero_reg__ r17
++#define __tmp_reg__ r16
++#else
+ #define __zero_reg__ r1
+ #define __tmp_reg__ r0
++#endif
+ #define __SREG__ 0x3f
+ #define __SP_H__ 0x3e
+ #define __SP_L__ 0x3d
+ #define __RAMPZ__ 0x3B
+-
+ /* Conversions to float. */
+ #if defined (L_fractqqsf)
+ .global __fractqqsf
+@@ -281,15 +285,15 @@ __muluqq3_exit:
+ .func __mulhq3
+ __mulhq3:
+ fmuls r_arg1H, r_arg2H
+- movw r_resL, r0
++ movw r_resL, __tmp_reg__
+ fmulsu r_arg2H, r_arg1L
+ clr r_arg1L
+ sbc r_resH, r_arg1L
+- add r_resL, r1
++ add r_resL, __zero_reg__
+ adc r_resH, r_arg1L
+ fmulsu r_arg1H, r_arg2L
+ sbc r_resH, r_arg1L
+- add r_resL, r1
++ add r_resL, __zero_reg__
+ adc r_resH, r_arg1L
+ clr __zero_reg__
+ ret
+@@ -301,13 +305,13 @@ __mulhq3:
+ .func __muluhq3
+ __muluhq3:
+ mul r_arg1H, r_arg2H
+- movw r_resL, r0
++ movw r_resL, __tmp_reg__
+ mul r_arg1H, r_arg2L
+- add r_resL, r1
++ add r_resL, __zero_reg__
+ clr __zero_reg__
+ adc r_resH, __zero_reg__
+ mul r_arg1L, r_arg2H
+- add r_resL, r1
++ add r_resL, __zero_reg__
+ clr __zero_reg__
+ adc r_resH, __zero_reg__
+ ret
+@@ -401,15 +405,15 @@ __muluhq3_skip:
+ .func __mulha3
+ __mulha3:
+ mul r_arg1L, r_arg2L
+- mov r_resL, r1
++ mov r_resL, __zero_reg__
+ muls r_arg1H, r_arg2H
+- mov r_resH, r0
++ mov r_resH, __tmp_reg__
+ mulsu r_arg1H, r_arg2L
+- add r_resL, r0
+- adc r_resH, r1
++ add r_resL, __tmp_reg__
++ adc r_resH, __zero_reg__
+ mulsu r_arg2H, r_arg1L
+- add r_resL, r0
+- adc r_resH, r1
++ add r_resL, __tmp_reg__
++ adc r_resH, __zero_reg__
+ clr __zero_reg__
+ ret
+ .endfunc
+@@ -420,15 +424,15 @@ __mulha3:
+ .func __muluha3
+ __muluha3:
+ mul r_arg1L, r_arg2L
+- mov r_resL, r1
++ mov r_resL, __zero_reg__
+ mul r_arg1H, r_arg2H
+- mov r_resH, r0
++ mov r_resH, __tmp_reg__
+ mul r_arg1H, r_arg2L
+- add r_resL, r0
+- adc r_resH, r1
++ add r_resL, __tmp_reg__
++ adc r_resH, __zero_reg__
+ mul r_arg1L, r_arg2H
+- add r_resL, r0
+- adc r_resH, r1
++ add r_resL, __tmp_reg__
++ adc r_resH, __zero_reg__
+ clr __zero_reg__
+ ret
+ .endfunc
+@@ -442,8 +446,8 @@ __muluha3:
+ #define r_arg2H r23 /* multiplicand High */
+ #define r_resL r18 /* result Low */
+ #define r_resH r19 /* result High */
+-#define r_scratchL r0 /* scratch Low */
+-#define r_scratchH r1
++#define r_scratchL __tmp_reg__ /* scratch Low */
++#define r_scratchH __zero_reg__
+
+ #if defined (L_mulha3)
+ .global __mulha3
+@@ -480,8 +484,8 @@ __mulha3_exit:
+ __muluha3:
+ clr r_resL ; clear result
+ clr r_resH
+- mov_l r0, r_arg1L ; save multiplicand
+- mov_h r1, r_arg1H
++ mov_l __tmp_reg__, r_arg1L ; save multiplicand
++ mov_h __zero_reg__, r_arg1H
+ __muluha3_loop1:
+ sbrs r_arg2H,0
+ rjmp __muluha3_skip1
+@@ -490,7 +494,12 @@ __muluha3_loop1:
+ __muluha3_skip1:
+ lsl r_arg1L ; shift multiplicand
+ rol r_arg1H
++#if defined (__AVR_TINY__)
++ subi r_arg1L, lo8(0)
++ sbci r_arg1L, hi8(0)
++#else
+ sbiw r_arg1L,0
++#endif
+ breq __muluha3_loop1_done ; exit multiplicand = 0
+ lsr r_arg2H
+ brne __muluha3_loop1 ; exit multiplier = 0
+@@ -500,7 +509,12 @@ __muluha3_loop1_done:
+ __muluha3_loop2:
+ lsr r_arg1H ; shift multiplicand
+ ror r_arg1L
++#if defined (__AVR_TINY__)
++ subi r_arg1L, lo8(0)
++ sbci r_arg1L, hi8(0)
++#else
+ sbiw r_arg1L,0
++#endif
+ breq __muluha3_exit ; exit if multiplicand = 0
+ sbrs r_arg2L,7
+ rjmp __muluha3_skip2
+@@ -556,53 +570,53 @@ __mulsa3:
+ clr r_resHL
+ clr r_resHH
+ mul r_arg1H, r_arg2L
+- mov r_resL, r1
++ mov r_resL, __zero_reg__
+ mul r_arg1L, r_arg2H
+- add r_resL, r1
++ add r_resL, __zero_reg__
+ adc r_resH, r_clr
+ mul r_arg1L, r_arg2HL
+- add r_resL, r0
+- adc r_resH, r1
++ add r_resL, __tmp_reg__
++ adc r_resH, __zero_reg__
+ adc r_resHL, r_clr
+ mul r_arg1H, r_arg2H
+- add r_resL, r0
+- adc r_resH, r1
++ add r_resL, __tmp_reg__
++ adc r_resH, __zero_reg__
+ adc r_resHL, r_clr
+ mul r_arg1HL, r_arg2L
+- add r_resL, r0
+- adc r_resH, r1
++ add r_resL, __tmp_reg__
++ adc r_resH, __zero_reg__
+ adc r_resHL, r_clr
+ mulsu r_arg2HH, r_arg1L
+ sbc r_resHH, r_clr
+- add r_resH, r0
+- adc r_resHL, r1
++ add r_resH, __tmp_reg__
++ adc r_resHL, __zero_reg__
+ adc r_resHH, r_clr
+ mul r_arg1H, r_arg2HL
+- add r_resH, r0
+- adc r_resHL, r1
++ add r_resH, __tmp_reg__
++ adc r_resHL, __zero_reg__
+ adc r_resHH, r_clr
+ mul r_arg1HL, r_arg2H
+- add r_resH, r0
+- adc r_resHL, r1
++ add r_resH, __tmp_reg__
++ adc r_resHL, __zero_reg__
+ adc r_resHH, r_clr
+ mulsu r_arg1HH, r_arg2L
+ sbc r_resHH, r_clr
+- add r_resH, r0
+- adc r_resHL, r1
++ add r_resH, __tmp_reg__
++ adc r_resHL, __zero_reg__
+ adc r_resHH, r_clr
+ mulsu r_arg2HH, r_arg1H
+- add r_resHL, r0
+- adc r_resHH, r1
++ add r_resHL, __tmp_reg__
++ adc r_resHH, __zero_reg__
+ mul r_arg1HL, r_arg2HL
+- add r_resHL, r0
+- adc r_resHH, r1
++ add r_resHL, __tmp_reg__
++ adc r_resHH, __zero_reg__
+ mulsu r_arg1HH, r_arg2H
+- add r_resHL, r0
+- adc r_resHH, r1
++ add r_resHL, __tmp_reg__
++ adc r_resHH, __zero_reg__
+ mulsu r_arg2HH, r_arg1HL
+- add r_resHH, r0
++ add r_resHH, __tmp_reg__
+ mulsu r_arg1HH, r_arg2HL
+- add r_resHH, r0
++ add r_resHH, __tmp_reg__
+ clr __zero_reg__
+ ret
+ .endfunc
+@@ -617,51 +631,51 @@ __mulusa3:
+ clr r_resHL
+ clr r_resHH
+ mul r_arg1H, r_arg2L
+- mov r_resL, r1
++ mov r_resL, __zero_reg__
+ mul r_arg1L, r_arg2H
+- add r_resL, r1
++ add r_resL, __zero_reg__
+ adc r_resH, r_clr
+ mul r_arg1L, r_arg2HL
+- add r_resL, r0
+- adc r_resH, r1
++ add r_resL, __tmp_reg__
++ adc r_resH, __zero_reg__
+ adc r_resHL, r_clr
+ mul r_arg1H, r_arg2H
+- add r_resL, r0
+- adc r_resH, r1
++ add r_resL, __tmp_reg__
++ adc r_resH, __zero_reg__
+ adc r_resHL, r_clr
+ mul r_arg1HL, r_arg2L
+- add r_resL, r0
+- adc r_resH, r1
++ add r_resL, __tmp_reg__
++ adc r_resH, __zero_reg__
+ adc r_resHL, r_clr
+ mul r_arg1L, r_arg2HH
+- add r_resH, r0
+- adc r_resHL, r1
++ add r_resH, __tmp_reg__
++ adc r_resHL, __zero_reg__
+ adc r_resHH, r_clr
+ mul r_arg1H, r_arg2HL
+- add r_resH, r0
+- adc r_resHL, r1
++ add r_resH, __tmp_reg__
++ adc r_resHL, __zero_reg__
+ adc r_resHH, r_clr
+ mul r_arg1HL, r_arg2H
+- add r_resH, r0
+- adc r_resHL, r1
++ add r_resH, __tmp_reg__
++ adc r_resHL, __zero_reg__
+ adc r_resHH, r_clr
+ mul r_arg1HH, r_arg2L
+- add r_resH, r0
+- adc r_resHL, r1
++ add r_resH, __tmp_reg__
++ adc r_resHL, __zero_reg__
+ adc r_resHH, r_clr
+ mul r_arg1H, r_arg2HH
+- add r_resHL, r0
+- adc r_resHH, r1
++ add r_resHL, __tmp_reg__
++ adc r_resHH, __zero_reg__
+ mul r_arg1HL, r_arg2HL
+- add r_resHL, r0
+- adc r_resHH, r1
++ add r_resHL, __tmp_reg__
++ adc r_resHH, __zero_reg__
+ mul r_arg1HH, r_arg2H
+- add r_resHL, r0
+- adc r_resHH, r1
++ add r_resHL, __tmp_reg__
++ adc r_resHH, __zero_reg__
+ mul r_arg1HL, r_arg2HH
+- add r_resHH, r0
++ add r_resHH, __tmp_reg__
+ mul r_arg1HH, r_arg2HL
+- add r_resHH, r0
++ add r_resHH, __tmp_reg__
+ clr __zero_reg__
+ ret
+ .endfunc
+@@ -680,13 +694,20 @@ __mulusa3:
+ #define r_arg2HL r26
+ #define r_arg2HH r27 /* multiplicand High */
+
++#if defined (__AVR_TINY__)
++#define r_resL r28 /* result Low */
++#define r_resH r29
++#define r_resHL r30
++#define r_resHH r31 /* result High */
++#else
+ #define r_resL r14 /* result Low */
+ #define r_resH r15
+ #define r_resHL r16
+ #define r_resHH r17 /* result High */
++#endif
+
+-#define r_scratchL r0 /* scratch Low */
+-#define r_scratchH r1
++#define r_scratchL __tmp_reg__ /* scratch Low */
++#define r_scratchH __zero_reg__
+ #define r_scratchHL r22
+ #define r_scratchHH r23 /* scratch High */
+
+@@ -758,7 +779,12 @@ __mulusa3_skip1:
+ rol r_arg1HH
+ lsr r_arg2HH
+ ror r_arg2HL
++#if defined (__AVR_TINY__)
++ subi r_arg2HL, lo8(0)
++ sbci r_arg2HL, hi8(0)
++#else
+ sbiw r_arg2HL,0
++#endif
+ brne __mulusa3_loop1 ; exit multiplier = 0
+ __mulusa3_loop1_done:
+ mov_l r_arg1L, r_scratchL ; restore multiplicand
+@@ -779,7 +805,12 @@ __mulusa3_loop2:
+ __mulusa3_skip2:
+ lsl r_arg2L
+ rol r_arg2H
++#if defined (__AVR_TINY__)
++ subi r_arg2L, lo8(0)
++ sbci r_arg2L, hi8(0)
++#else
+ sbiw r_arg2L,0
++#endif
+ brne __mulusa3_loop2 ; exit if multiplier = 0
+ __mulusa3_exit:
+ clr __zero_reg__ ; got clobbered
+@@ -791,9 +822,7 @@ __mulusa3_exit:
+ #undef r_scratchH
+ #undef r_scratchHL
+ #undef r_scratchHH
+-
+ #endif
+-
+ #undef r_arg1L
+ #undef r_arg1H
+ #undef r_arg1HL
+@@ -821,8 +850,8 @@ __mulusa3_exit:
+ .global __divqq3
+ .func __divqq3
+ __divqq3:
+- mov r0, r_divd
+- eor r0, r_div
++ mov __tmp_reg__, r_divd
++ eor __tmp_reg__, r_div
+ sbrc r_div, 7
+ neg r_div
+ sbrc r_divd, 7
+@@ -831,7 +860,7 @@ __divqq3:
+ breq __divqq3_minus1 ; if equal return -1
+ rcall __udivuqq3
+ lsr r_quo
+- sbrc r0, 7 ; negate result if needed
++ sbrc __tmp_reg__, 7 ; negate result if needed
+ neg r_quo
+ ret
+ __divqq3_minus1:
+@@ -886,8 +915,8 @@ __udivuqq3_cont:
+ .global __divhq3
+ .func __divhq3
+ __divhq3:
+- mov r0, r_divdH
+- eor r0, r_divH
++ mov __tmp_reg__, r_divdH
++ eor __tmp_reg__, r_divH
+ sbrs r_divH, 7
+ rjmp __divhq3_divpos
+ com r_divH
+@@ -906,7 +935,7 @@ __divhq3_divdpos:
+ rcall __udivuhq3
+ lsr r_quoH
+ ror r_quoL
+- sbrs r0, 7 ; negate result if needed
++ sbrs __tmp_reg__, 7 ; negate result if needed
+ ret
+ com r_quoH
+ neg r_quoL
+@@ -958,8 +987,8 @@ __udivuhq3_cont:
+ .global __divha3
+ .func __divha3
+ __divha3:
+- mov r0, r_divdH
+- eor r0, r_divH
++ mov __tmp_reg__, r_divdH
++ eor __tmp_reg__, r_divH
+ sbrs r_divH, 7
+ rjmp __divha3_divpos
+ com r_divH
+@@ -973,7 +1002,7 @@ __divha3_divpos:
+ sbci r_divdH,-1
+ __divha3_divdpos:
+ rcall __udivuha3
+- sbrs r0, 7 ; negate result if needed
++ sbrs __tmp_reg__, 7 ; negate result if needed
+ ret
+ com r_quoH
+ neg r_quoL
+@@ -1027,8 +1056,8 @@ __udivuha3:
+ .global __divsa3
+ .func __divsa3
+ __divsa3:
+- mov r0, r27
+- eor r0, r_divHH
++ mov __tmp_reg__, r27
++ eor __tmp_reg__, r_divHH
+ sbrs r_divHH, 7
+ rjmp __divsa3_divpos
+ com r_divHH
+@@ -1050,7 +1079,7 @@ __divsa3_divpos:
+ sbci r_arg1HH,-1
+ __divsa3_arg1pos:
+ rcall __udivusa3
+- sbrs r0, 7 ; negate result if needed
++ sbrs __tmp_reg__, 7 ; negate result if needed
+ ret
+ com r_quoHH
+ com r_quoHL
+diff -Naurp gcc/config/avr/libgcc.S gcc/config/avr/libgcc.S
+--- gcc/config/avr/libgcc.S 2011-09-02 11:45:05.000000000 +0300
++++ gcc/config/avr/libgcc.S 2011-09-02 11:46:03.000000000 +0300
+@@ -22,8 +22,13 @@ a copy of the GCC Runtime Library Except
+ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+ <http://www.gnu.org/licenses/>. */
+
++#if defined (__AVR_TINY__)
++#define __zero_reg__ r17
++#define __tmp_reg__ r16
++#else
+ #define __zero_reg__ r1
+ #define __tmp_reg__ r0
++#endif
+ #define __SREG__ 0x3f
+ #define __SP_H__ 0x3e
+ #define __SP_L__ 0x3d
+@@ -140,7 +145,12 @@ __mulhi3_skip1:
+
+ lsr r_arg1H ; gets LSB of multiplier
+ ror r_arg1L
++#if defined (__AVR_TINY__)
++ subi r_arg1L, lo8(0)
++ sbci r_arg1L, hi8(0)
++#else
+ sbiw r_arg1L,0
++#endif
+ brne __mulhi3_loop ; exit if multiplier = 0
+ __mulhi3_exit:
+ mov r_arg1H,r_resH ; result to return register
+@@ -304,7 +314,12 @@ __mulsi3_skip1:
+ ror r_arg1H
+ ror r_arg1L
+ brne __mulsi3_loop
++#if defined (__AVR_TINY__)
++ subi r_arg1HL, lo8(0)
++ sbci r_arg1HL, hi8(0)
++#else
+ sbiw r_arg1HL,0
++#endif
+ cpc r_arg1H,r_arg1L
+ brne __mulsi3_loop ; exit if multiplier = 0
+ __mulsi3_exit:
+@@ -610,6 +625,7 @@ __divmodsi4_neg1:
+ /**********************************
+ * This is a prologue subroutine
+ **********************************/
++#if !defined (__AVR_TINY__)
+ #if defined (L_prologue)
+
+ .global __prologue_saves__
+@@ -663,7 +679,6 @@ __prologue_saves__:
+ * This is an epilogue subroutine
+ */
+ #if defined (L_epilogue)
+-
+ .global __epilogue_restores__
+ .func __epilogue_restores__
+ __epilogue_restores__:
+@@ -704,6 +719,7 @@ __epilogue_restores__:
+ ret
+ .endfunc
+ #endif /* defined (L_epilogue) */
++#endif /* !defined (__AVR_TINY__) */
+
+ #ifdef L_exit
+ .section .fini9,"ax",@progbits
+@@ -730,6 +746,7 @@ _cleanup:
+ .endfunc
+ #endif /* defined (L_cleanup) */
+
++#if !defined(__AVR_TINY__)
+ #ifdef L_tablejump
+ .global __tablejump2__
+ .func __tablejump2__
+@@ -762,7 +779,9 @@ __tablejump__:
+ #endif
+ .endfunc
+ #endif /* defined (L_tablejump) */
++#endif
+
++#if !defined(__AVR_TINY__)
+ #ifdef L_copy_data
+ .section .init4,"ax",@progbits
+ .global __do_copy_data
+@@ -824,6 +843,7 @@ __do_copy_data:
+ brne .L__do_copy_data_loop
+ #endif /* !defined(__AVR_HAVE_ELPMX__) && !defined(__AVR_HAVE_ELPM__) */
+ #endif /* L_copy_data */
++#endif
+
+ /* __do_clear_bss is only necessary if there is anything in .bss section. */
+
+@@ -864,7 +884,12 @@ __do_global_ctors:
+ ldi r20, hh8(__ctors_end)
+ rjmp .L__do_global_ctors_start
+ .L__do_global_ctors_loop:
++#if defined (__AVR_TINY__)
++ subi r28, lo8(2)
++ sbci r29, hi8(2)
++#else
+ sbiw r28, 2
++#endif
+ sbc r20, __zero_reg__
+ mov_h r31, r29
+ mov_l r30, r28
+@@ -882,7 +907,12 @@ __do_global_ctors:
+ ldi r29, hi8(__ctors_end)
+ rjmp .L__do_global_ctors_start
+ .L__do_global_ctors_loop:
++#if defined (__AVR_TINY__)
++ subi r28, lo8(2)
++ sbci r29, hi8(2)
++#else
+ sbiw r28, 2
++#endif
+ mov_h r31, r29
+ mov_l r30, r28
+ XCALL __tablejump__
+@@ -905,7 +935,12 @@ __do_global_dtors:
+ ldi r20, hh8(__dtors_start)
+ rjmp .L__do_global_dtors_start
+ .L__do_global_dtors_loop:
++#if defined (__AVR_TINY__)
++ subi r28, lo8(2)
++ sbci r29, hi8(2)
++#else
+ sbiw r28, 2
++#endif
+ sbc r20, __zero_reg__
+ mov_h r31, r29
+ mov_l r30, r28
+@@ -926,7 +961,12 @@ __do_global_dtors:
+ mov_h r31, r29
+ mov_l r30, r28
+ XCALL __tablejump__
++#if defined (__AVR_TINY__)
++ subi r28, lo8(-2)
++ sbci r29, hi8(-2)
++#else
+ adiw r28, 2
++#endif
+ .L__do_global_dtors_start:
+ cpi r28, lo8(__dtors_end)
+ cpc r29, r17
+@@ -934,6 +974,7 @@ __do_global_dtors:
+ #endif /* defined(__AVR_HAVE_RAMPZ__) */
+ #endif /* L_dtors */
+
++#if !defined (__AVR_TINY__)
+ #ifdef L_tablejump_elpm
+ .global __tablejump_elpm__
+ .func __tablejump_elpm__
+@@ -963,5 +1004,6 @@ __tablejump_elpm__:
+ #endif /* defined (__AVR_HAVE_ELPM__) */
+ .endfunc
+ #endif /* defined (L_tablejump_elpm) */
++#endif /* !defined (__AVR_TINY__) */
+
+ #include "libgcc-fixed.S"
+diff -Naurp gcc/config/avr/t-avr gcc/config/avr/t-avr
+--- gcc/config/avr/t-avr 2011-09-02 11:45:05.000000000 +0300
++++ gcc/config/avr/t-avr 2011-09-02 11:46:03.000000000 +0300
+@@ -107,8 +107,8 @@ fp-bit.c: $(srcdir)/config/fp-bit.c $(sr
+
+ FPBIT = fp-bit.c
+
+-MULTILIB_OPTIONS = mmcu=avr2/mmcu=avr25/mmcu=avr3/mmcu=avr31/mmcu=avr35/mmcu=avr4/mmcu=avr5/mmcu=avr51/mmcu=avr6/mmcu=avrxmega2/mmcu=avrxmega4/mmcu=avrxmega5/mmcu=avrxmega6/mmcu=avrxmega7
+-MULTILIB_DIRNAMES = avr2 avr25 avr3 avr31 avr35 avr4 avr5 avr51 avr6 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7
++MULTILIB_OPTIONS = mmcu=avr2/mmcu=avr25/mmcu=avr3/mmcu=avr31/mmcu=avr35/mmcu=avr4/mmcu=avr5/mmcu=avr51/mmcu=avr6/mmcu=avrxmega2/mmcu=avrxmega4/mmcu=avrxmega5/mmcu=avrxmega6/mmcu=avrxmega7/mmcu=avrtiny10
++MULTILIB_DIRNAMES = avr2 avr25 avr3 avr31 avr35 avr4 avr5 avr51 avr6 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7 avrtiny10
+
+ # The many avr2 matches are not listed here - this is the default.
+ MULTILIB_MATCHES = \
+@@ -241,7 +241,13 @@ MULTILIB_MATCHES = \
+ mmcu?avrxmega6=mmcu?atxmega256a3b \
+ mmcu?avrxmega6=mmcu?atxmega256d3 \
+ mmcu?avrxmega7=mmcu?atxmega128a1 \
+- mmcu?avrxmega7=mmcu?atxmega128a1u
++ mmcu?avrxmega7=mmcu?atxmega128a1u \
++ mmcu?avrtiny10=mmcu?attiny4 \
++ mmcu?avrtiny10=mmcu?attiny5 \
++ mmcu?avrtiny10=mmcu?attiny9 \
++ mmcu?avrtiny10=mmcu?attiny10 \
++ mmcu?avrtiny10=mmcu?attiny20 \
++ mmcu?avrtiny10=mmcu?attiny40
+
+ MULTILIB_EXCEPTIONS =
+