mov a,%3
}
+// Peepholes 3.d to 3.g drop a second "clr a" that follows a chain of
+// "mov %n,a" stores: none of those movs modify acc, so acc is still zero.
+replace {
+ clr a
+ mov %1,a
+ mov %2,a
+ clr a
+} by {
+ clr a
+ mov %1,a
+ mov %2,a
+ ; Peephole 3.d removed redundant clr
+}
+
+replace {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ clr a
+} by {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ ; Peephole 3.e removed redundant clr
+}
+
+replace {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ mov %4,a
+ clr a
+} by {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ mov %4,a
+ ; Peephole 3.f removed redundant clr
+}
+
+replace {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ mov %4,a
+ mov %5,a
+ clr a
+} by {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ mov %4,a
+ mov %5,a
+ ; Peephole 3.g removed redundant clr
+}
+
+// Peepholes 3.h to 3.k change a trailing "mov %n,#0x00" into "mov %n,a":
+// acc is known to be zero here (same reasoning as above) and the register/
+// direct form is shorter than the immediate form.
+replace {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,#0x00
+} by {
+ clr a
+ mov %1,a
+ mov %2,a
+ ; Peephole 3.h changed mov %3,#0x00 to %3,a
+ mov %3,a
+}
+
+replace {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ mov %4,#0x00
+} by {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ ; Peephole 3.i changed mov %4,#0x00 to %4,a
+ mov %4,a
+}
+
+replace {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ mov %4,a
+ mov %5,#0x00
+} by {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ mov %4,a
+ ; Peephole 3.j changed mov %5,#0x00 to %5,a
+ mov %5,a
+}
+
+replace {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ mov %4,a
+ mov %5,a
+ mov %6,#0x00
+} by {
+ clr a
+ mov %1,a
+ mov %2,a
+ mov %3,a
+ mov %4,a
+ mov %5,a
+ ; Peephole 3.k changed mov %6,#0x00 to %6,a
+ mov %6,a
+}
+
replace {
mov %1,a
mov dptr,#%2
%1:
} if labelInRange
-
replace {
clr a
cjne %1,%2,%3
%3:
jnz %4
} by {
- ; Peephole 115 jump optimization
+ ; Peephole 115.a jump optimization
cjne %1,%2,%3
sjmp %4
%3:
} if labelRefCount %3 1
+// 115.b: after "mov %1,a" the cjne against #0x00 is really a zero test of
+// acc, so the cjne/sjmp pair collapses into a single jz. NOTE(review): cjne
+// also clears the carry flag while jz leaves it alone - assumes no later
+// code relies on carry at this point; confirm against the code generator.
+replace {
+ mov %1,a
+ cjne %1,#0x00,%2
+ sjmp %3
+%2:
+} by {
+ ; Peephole 115.b jump optimization
+ mov %1,a
+ jz %3
+%2:
+}
+
replace {
clr a
cjne %1,%2,%3
} by {
; Peephole 123 jump optimization
cjne %1,%2,%3
- smp %4
+ sjmp %4
%3:
} if labelRefCount %3 1
jnc %1
}
+// applies to: bug-524691.c --model-large: while (uRight - uLeft > 1)
replace {
clr a
rlc a
- jnz %1
+ jnz %0
} by {
- ; Peephole 129 jump optimization
- jc %1
+ ; Peephole 129.a jump optimization
+ jc %0
}
+// 129.b to 129.d: "clr a / rlc a" copies the carry flag into acc, and the
+// final jnz then tests that copy - i.e. the whole sequence just branches on
+// carry. The 8051 POP instruction does not modify the carry flag, so the
+// pops may stay in place and the test becomes a plain jc.
+// NOTE(review): assumes none of %1..%3 is psw (popping psw would change
+// carry) - SDCC's stack code presumably never does that here; confirm.
+// applies to: _fsdiv.c --xstack: if (mant1 < mant2)
+replace {
+ clr a
+ rlc a
+ pop %1
+ jnz %0
+} by {
+ ; Peephole 129.b optimized condition
+ pop %1
+ jc %0
+} if notVolatile %1
+
+// applies to: time.c --xstack: while((days += (LEAP_YEAR(year) ? 366 : 365)) <= epoch)
+replace {
+ clr a
+ rlc a
+ pop %1
+ pop %2
+ jnz %0
+} by {
+ ; Peephole 129.c optimized condition
+ pop %1
+ pop %2
+ jc %0
+} if notVolatile %1 %2
+
+// applies to: _memmove.c --xstack: if (((int)src < (int)dst) && ((((int)src)+acount) > (int)dst))
+replace {
+ clr a
+ rlc a
+ pop %1
+ pop %2
+ pop %3
+ jnz %0
+} by {
+ ; Peephole 129.d optimized condition
+ pop %1
+ pop %2
+ pop %3
+ jc %0
+} if notVolatile %1 %2 %3
+
replace {
mov r%1,@r%2
} by {
mov %2,%1
}
+// ideally the optimizations of rules 132.x should be done in genCmpXX
+// 132.a to 132.c rewrite "clr c / mov a,#%1 / subb a,%2", whose carry (SUBB
+// borrow) is set iff %1 < %2, as "mov a,%2 / add a,#0xff - %1", whose carry
+// is set iff %2 + 255 - %1 > 255, i.e. iff %2 > %1 - the same condition.
+// "(acc differs)": the final acc value is the complement of the original
+// difference, so these rules assume acc is dead after the compare -
+// TODO confirm against the genCmpGt/genCmpLt code generators.
+replace {
+ clr c
+ mov a,#%1
+ subb a,%2
+ mov %3,c
+} by {
+ ; Peephole 132.a optimized genCmpGt by inverse logic (acc differs)
+ mov a,%2
+ add a,#0xff - %1
+ mov %3,c
+}
+
+replace {
+ clr c
+ mov a,#%1
+ subb a,%2
+ jnc %5
+} by {
+ ; Peephole 132.b optimized genCmpGt by inverse logic (acc differs)
+ mov a,%2
+ add a,#0xff - %1
+ jnc %5
+}
+
+replace {
+ clr c
+ mov a,#%1
+ subb a,%2
+ jc %5
+} by {
+ ; Peephole 132.c optimized genCmpGt by inverse logic (acc differs)
+ mov a,%2
+ add a,#0xff - %1
+ jc %5
+}
+
+// 132.d rewrites "clr c / mov a,%1 / subb a,#%2 / mov %3,c", whose stored
+// carry (SUBB borrow) is set iff %1 < %2. "add a,#0x100 - %2" sets carry iff
+// %1 + 256 - %2 >= 256, i.e. iff %1 >= %2 - the INVERSE condition - so the
+// carry must be complemented before it is stored into %3. (With the cpl the
+// final acc value, %1 - %2 mod 256, also matches the original sequence.)
+// %2 == 0x00 is excluded because #0x100 - 0x00 does not fit in a byte.
+replace {
+ clr c
+ mov a,%1
+ subb a,#%2
+ mov %3,c
+} by {
+ ; Peephole 132.d optimized genCmpGt by inverse logic
+ mov a,#0x100 - %2
+ add a,%1
+ cpl c
+ mov %3,c
+} if operandsNotRelated('0x00' %2)
+
+// 132.e/132.f: here the carry of "add a,#0x100 - %2" is set iff %1 >= %2,
+// the inverse of the original SUBB borrow (%1 < %2); this is compensated by
+// swapping the conditional jump (jnc <-> jc), hence "(carry differs)".
+// The final acc value, %1 - %2 mod 256, is the same as in the original.
+replace {
+ clr c
+ mov a,%1
+ subb a,#%2
+ jnc %5
+} by {
+ ; Peephole 132.e optimized genCmpLt by inverse logic (carry differs)
+ mov a,#0x100 - %2
+ add a,%1
+ jc %5
+} if operandsNotRelated('0x00' %2)
+
+replace {
+ clr c
+ mov a,%1
+ subb a,#%2
+ jc %5
+} by {
+ ; Peephole 132.f optimized genCmpLt by inverse logic (carry differs)
+ mov a,#0x100 - %2
+ add a,%1
+ jnc %5
+} if operandsNotRelated('0x00' %2)
+
+
replace {
mov r%1,%2
mov ar%3,@r%1
xrl %1,#0x80
} by {
; Peephole 159 avoided xrl during execution
- mov %1,#(%2 ^ 0x80)
+ mov %1,#(%2 ^ 0x80)
}
replace {
// applies to f.e. bug-408972.c
// not before peephole 177.c
-replace {
+replace restart {
mov %1,%2
mov %3,%4
mov %2,%1
; Peephole 177.d removed redundant move
mov %1,%2
mov %3,%4
-} if notVolatile %1 %2
+} if notVolatile(%1 %2),operandsNotRelated(%1 %3)
// applies to f.e. bug-607243.c
// also check notVolatile %3, as it will return FALSE if it's @r%1
mov r%1,%3
} if notVolatile %2
+// 177.g/177.h route the assignment through acc so the store and the
+// following acc load end in the same state with one less memory read.
+// The guards are needed because the access order changes: 177.g turns a
+// write-then-read of %1 into a single write (notVolatile %1), and 177.h
+// reads %2 once instead of twice (notVolatile %2).
+replace {
+ mov %1,%2
+ mov a,%1
+} by {
+ ; peephole 177.g optimized mov sequence
+ mov a,%2
+ mov %1,a
+} if notVolatile %1
+
+replace {
+ mov %1,%2
+ mov a,%2
+} by {
+ ; peephole 177.h optimized mov sequence
+ mov a,%2
+ mov %1,a
+} if notVolatile %2
+
replace {
mov a,%1
mov b,a
replace {
clr a
- movx @dptr,a
- inc dptr
- movx @dptr,a
- inc dptr
+ movx @%1,a
+ inc %1
+ movx @%1,a
+ inc %1
clr a
} by {
; Peephole 226 removed unnecessary clr
clr a
- movx @dptr,a
- inc dptr
- movx @dptr,a
- inc dptr
+ movx @%1,a
+ inc %1
+ movx @%1,a
+ inc %1
}
replace {
movx @dptr,a
}
+// Peepholes 248.i to 248.m are like 248.d to 248.h except they apply to bitfields:
+// xdata struct { unsigned b0:1; unsigned b1:1; unsigned b2:1; } xport;
+// xport.b0=1; xport.b0=0; xport.b0=1;
+// The repeated "mov dptr,%1" reloads can be dropped: movx leaves dptr
+// unchanged and none of the intervening instructions touch it. Only the
+// pointer setup is elided - each read-modify-write of the xdata byte stays.
replace {
- jnz %1
-%1:
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%3
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%4
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%5
+ movx @dptr,a
} by {
- ; Peephole 249.a jump optimization
-} if labelRefCount %1 1
+ ; Peephole 248.i optimized or/and/or to xdata bitfield
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%3
+ movx @dptr,a
+ movx a,@dptr
+ anl a,%4
+ movx @dptr,a
+ movx a,@dptr
+ orl a,%5
+ movx @dptr,a
+}
+// 248.j: and/or/and variant of the dptr-reload elision of 248.i.
replace {
- jz %1
-%1:
-} by {
- ; Peephole 249.b jump optimization
-} if labelRefCount %1 1
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%3
+ movx @dptr,a
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%4
+ movx @dptr,a
-// This allows non-interrupt and interrupt code to safely compete
-// for a resource without the non-interrupt code having to disable
-// interrupts:
-// volatile bit resource_is_free;
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%5
+ movx @dptr,a
+} by {
+ ; Peephole 248.j optimized and/or/and to xdata bitfield
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%3
+ movx @dptr,a
+ movx a,@dptr
+ orl a,%4
+ movx @dptr,a
+ movx a,@dptr
+ anl a,%5
+ movx @dptr,a
+}
+
+// 248.k to 248.m: two-access variants (or/and, and/or, xor/xor) of the same
+// dptr-reload elision as 248.i - movx does not modify dptr.
+replace {
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%3
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%4
+ movx @dptr,a
+} by {
+ ; Peephole 248.k optimized or/and to xdata bitfield
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%3
+ movx @dptr,a
+ movx a,@dptr
+ anl a,%4
+ movx @dptr,a
+}
+
+replace {
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%3
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%4
+ movx @dptr,a
+} by {
+ ; Peephole 248.l optimized and/or to xdata bitfield
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%3
+ movx @dptr,a
+ movx a,@dptr
+ orl a,%4
+ movx @dptr,a
+}
+
+replace {
+ mov dptr,%1
+ movx a,@dptr
+ xrl a,%3
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ xrl a,%4
+ movx @dptr,a
+} by {
+ ; Peephole 248.m optimized xor/xor to xdata bitfield
+ mov dptr,%1
+ movx a,@dptr
+ xrl a,%3
+ movx @dptr,a
+ movx a,@dptr
+ xrl a,%4
+ movx @dptr,a
+}
+
+
+// 249.a/249.b: a conditional jump to the textually next line is a no-op
+// (jnz/jz only test acc, they modify nothing), and labelRefCount %1 1 means
+// this jump is the label's only reference - so both lines can be removed.
+replace {
+ jnz %1
+%1:
+} by {
+ ; Peephole 249.a jump optimization
+} if labelRefCount %1 1
+
+replace {
+ jz %1
+%1:
+} by {
+ ; Peephole 249.b jump optimization
+} if labelRefCount %1 1
+
+
+// This allows non-interrupt and interrupt code to safely compete
+// for a resource without the non-interrupt code having to disable
+// interrupts:
+// volatile bit resource_is_free;
// if( resource_is_free ) {
// resource_is_free=0; do_something; resource_is_free=1;
// }
%2:
} if labelRefCount %2 0
+// 256.c to 256.f: on the fall-through path of "jnz %1" acc is known to be
+// zero, and labelRefCount %2 0 guarantees no other code enters at %2:, so a
+// trailing "mov %3,#0x00" can load from acc instead (one byte shorter).
+// The operandsNotRelated('a' ...) guards ensure the intervening movs do not
+// clobber acc.
+// applies to f.e. bug-905492.c
+replace {
+ jnz %1
+%2:
+ mov %3,#0x00
+} by {
+ ; Peephole 256.c loading %3 with zero from a
+ jnz %1
+%2:
+ mov %3,a
+} if labelRefCount %2 0
+
+// applies to f.e. malloc.c
+replace {
+ jnz %1
+%2:
+ mov %4,%5
+ mov %3,#0x00
+} by {
+ ; Peephole 256.d loading %3 with zero from a
+ jnz %1
+%2:
+ mov %4,%5
+ mov %3,a
+} if labelRefCount(%2 0),operandsNotRelated('a' %4)
+
+replace {
+ jnz %1
+%2:
+ mov %4,%5
+ mov %6,%7
+ mov %3,#0x00
+} by {
+ ; Peephole 256.e loading %3 with zero from a
+ jnz %1
+%2:
+ mov %4,%5
+ mov %6,%7
+ mov %3,a
+} if labelRefCount(%2 0),operandsNotRelated('a' %4 %6)
+
+replace {
+ jnz %1
+%2:
+ mov %4,%5
+ mov %6,%7
+ mov %8,%9
+ mov %3,#0x00
+} by {
+ ; Peephole 256.f loading %3 with zero from a
+ jnz %1
+%2:
+ mov %4,%5
+ mov %6,%7
+ mov %8,%9
+ mov %3,a
+} if labelRefCount(%2 0),operandsNotRelated('a' %4 %6 %8)
+
// unsigned char i=8; do{ } while(--i != 0);
// this currently only applies if i is kept in a register
// helps f.e. writing data on a 3-wire (SPI) bus
replace {
mov a,%1
- mov c,acc.7
+ rlc a
mov %2,c
mov a,%1
add a,%1
// out_bit=out_byte&0x01; out_byte>>=1;
replace {
mov a,%1
- mov c,acc.0
+ rrc a
mov %2,c
mov a,%1
clr c
mov %1,a
} if notVolatile %1
+// Peepholes 259.x are not compatible with peepholes 250.x
+// Peepholes 250.x add jumps to a previously unused label. As the
+// labelRefCount is not increased, peepholes 259.x are (mistakenly) applied.
+// (Mail on sdcc-devel 2004-10-25)
+// Note: Peepholes 193..199, 251 remove jumps to previously used labels without
+// decreasing labelRefCount (less dangerous - this f.e. leads to 253.c being
+// applied instead of 253.b)
+//
+// applies to f.e. vprintf.c
+//replace {
+// sjmp %1
+//%2:
+// ret
+//} by {
+// sjmp %1
+// ; Peephole 259.a removed redundant label %2 and ret
+// ;
+//} if labelRefCount %2 0
+
+// applies to f.e. gets.c
+//replace {
+// ljmp %1
+//%2:
+// ret
+//} by {
+// ljmp %1
+// ; Peephole 259.b removed redundant label %2 and ret
+// ;
+//} if labelRefCount %2 0
+
+// optimizing jumptables
+// Please note: to enable peephole 260.x you currently have to set
+// the environment variable SDCC_SJMP_JUMPTABLE
+// Dropping the leading "add a,%1" rescales the table index: sjmp entries are
+// 2 bytes where ljmp entries are 3. NOTE(review): this assumes the generator
+// produced acc = 2*index and then added %1 = index to reach the 3-byte
+// scale, so the unmodified acc is exactly the 2-byte-scaled index - confirm
+// against genJumpTab before relying on SDCC_SJMP_JUMPTABLE.
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+%3:
+} by {
+ ; Peephole 260.a used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+%3:
+} by {
+ ; Peephole 260.b used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+%3:
+} by {
+ ; Peephole 260.c used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+%3:
+} by {
+ ; Peephole 260.d used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+%3:
+} by {
+ ; Peephole 260.e used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+%3:
+} by {
+ ; Peephole 260.f used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+%3:
+} by {
+ ; Peephole 260.g used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+%3:
+} by {
+ ; Peephole 260.h used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+ ljmp %16
+%3:
+} by {
+ ; Peephole 260.i used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+ sjmp %16
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+ ljmp %16
+ ljmp %17
+%3:
+} by {
+ ; Peephole 260.j used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+ sjmp %16
+ sjmp %17
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+ ljmp %16
+ ljmp %17
+ ljmp %18
+%3:
+} by {
+ ; Peephole 260.k used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+ sjmp %16
+ sjmp %17
+ sjmp %18
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+ ljmp %16
+ ljmp %17
+ ljmp %18
+ ljmp %19
+%3:
+} by {
+ ; Peephole 260.l used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+ sjmp %16
+ sjmp %17
+ sjmp %18
+ sjmp %19
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+ ljmp %16
+ ljmp %17
+ ljmp %18
+ ljmp %19
+ ljmp %20
+%3:
+} by {
+ ; Peephole 260.m used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+ sjmp %16
+ sjmp %17
+ sjmp %18
+ sjmp %19
+ sjmp %20
+%3:
+} if labelJTInRange
+
+// 261.a/261.b: 16-bit rotate through two bytes. Using xch a,%2 rotates the
+// second byte in place and then restores the first byte into acc, saving the
+// intermediate stores/reloads of %1 and %2. xch does not affect the carry
+// flag, so the bit carried between the two rlc/rrc steps (and into acc.0 /
+// acc.7 at the end) survives both exchanges.
+// applies to: a = (a << 1) | (a >> 15);
+replace {
+ mov a,%1
+ rlc a
+ mov %1,a
+ mov a,%2
+ rlc a
+ mov %2,a
+ mov a,%1
+ mov acc.0,c
+ mov %1,a
+} by {
+ ; Peephole 261.a optimized left rol
+ mov a,%1
+ rlc a
+ xch a,%2
+ rlc a
+ xch a,%2
+ mov acc.0,c
+ mov %1,a
+}
+
+// applies to: a = (a << 15) | (a >> 1);
+replace {
+ mov a,%1
+ rrc a
+ mov %1,a
+ mov a,%2
+ rrc a
+ mov %2,a
+ mov a,%1
+ mov acc.7,c
+ mov %1,a
+} by {
+ ; Peephole 261.b optimized right rol
+ mov a,%1
+ rrc a
+ xch a,%2
+ rrc a
+ xch a,%2
+ mov acc.7,c
+ mov %1,a
+}