+// Peepholes 248.i to 248.m are like 248.d to 248.h except they apply to bitfields:
+// xdata struct { unsigned b0:1; unsigned b1:1; unsigned b2:1; } xport;
+// xport.b0=1; xport.b0=0; xport.b0=1;
+replace {
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%3
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%4
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%5
+ movx @dptr,a
+} by {
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%3
+ movx @dptr,a
+ ; Peephole 248.i optimized or/and/or to xdata bitfield
+ movx a,@dptr
+ anl a,%4
+ movx @dptr,a
+ movx a,@dptr
+ orl a,%5
+ movx @dptr,a
+}
+
+replace {
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%3
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%4
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%5
+ movx @dptr,a
+} by {
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%3
+ movx @dptr,a
+ ; Peephole 248.j optimized and/or/and to xdata bitfield
+ movx a,@dptr
+ orl a,%4
+ movx @dptr,a
+ movx a,@dptr
+ anl a,%5
+ movx @dptr,a
+}
+
+replace {
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%3
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%4
+ movx @dptr,a
+} by {
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%3
+ movx @dptr,a
+ ; Peephole 248.k optimized or/and to xdata bitfield
+ movx a,@dptr
+ anl a,%4
+ movx @dptr,a
+}
+
+replace {
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%3
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ orl a,%4
+ movx @dptr,a
+} by {
+ mov dptr,%1
+ movx a,@dptr
+ anl a,%3
+ movx @dptr,a
+ ; Peephole 248.l optimized and/or to xdata bitfield
+ movx a,@dptr
+ orl a,%4
+ movx @dptr,a
+}
+
+replace {
+ mov dptr,%1
+ movx a,@dptr
+ xrl a,%3
+ movx @dptr,a
+
+ mov dptr,%1
+ movx a,@dptr
+ xrl a,%4
+ movx @dptr,a
+} by {
+ mov dptr,%1
+ movx a,@dptr
+ xrl a,%3
+ movx @dptr,a
+ ; Peephole 248.m optimized xor/xor to xdata bitfield
+ movx a,@dptr
+ xrl a,%4
+ movx @dptr,a
+}
+
+
+replace {
+ jnz %1
+%1:
+} by {
+ ; Peephole 249.a jump optimization
+} if labelRefCount(%1 1), labelRefCountChange(%1 -1)
+
+replace {
+ jz %1
+%1:
+} by {
+ ; Peephole 249.b jump optimization
+} if labelRefCount(%1 1), labelRefCountChange(%1 -1)
+
+
+// This allows non-interrupt and interrupt code to safely compete
+// for a resource without the non-interrupt code having to disable
+// interrupts:
+// volatile bit resource_is_free;
+// if( resource_is_free ) {
+// resource_is_free=0; do_something; resource_is_free=1;
+// }
+replace {
+ jnb %1,%2
+%3:
+ clr %1
+} by {
+ ; Peephole 250.a using atomic test and clear
+ jbc %1,%3
+ sjmp %2
+%3:
+} if labelRefCount(%3 0), labelRefCountChange(%3 1)
+
+replace {
+ jb %1,%2
+ ljmp %3
+%2:
+ clr %1
+} by {
+ ; Peephole 250.b using atomic test and clear
+ jbc %1,%2
+ ljmp %3
+%2:
+} if labelRefCount %2 1
+
+
+// not before peephole 250.b
+replace {
+ ljmp %5
+} by {
+ ; Peephole 251.a replaced ljmp to ret with ret
+ ret
+} if labelIsReturnOnly(), labelRefCountChange(%5 -1)
+
+// not before peephole 250.b
+replace {
+ sjmp %5
+} by {
+ ; Peephole 251.b replaced sjmp to ret with ret
+ ret
+} if labelIsReturnOnly(), labelRefCountChange(%5 -1)
+
+// applies to shifts.c and when accessing arrays with an unsigned integer index
+// saving 1 byte, 2 cycles
+replace {
+ mov r%1,%2
+ mov a,(%2 + 1)
+ xch a,r%1
+ add a,acc
+ xch a,r%1
+ rlc a
+ mov r%3,a
+} by {
+ ; Peephole 252 optimized left shift
+ mov a,%2
+ add a,acc
+ mov r%1,a
+ mov a,(%2 + 1)
+ rlc a
+ mov r%3,a
+}
+
+// unsigned char i=8; do{ } while(--i != 0);
+// this applies if i is kept in a register
+replace {
+ dec %1
+ cjne %1,#0x00,%2
+} by {
+ ; Peephole 253.a optimized decrement with compare
+ djnz %1,%2
+} if notVolatile(%1)
+
+// unsigned char i=8; do{ } while(--i != 0);
+// this applies if i is kept in data memory
+// must come before 256, see bug 1721024
+replace {
+ dec %1
+ mov a,%1
+ jnz %2
+} by {
+ ; Peephole 253.b optimized decrement with compare
+ djnz %1,%2
+} if notVolatile(%1), operandsNotRelated(%1 '@r0' '@r1')
+
+
+// applies to f.e. funptrs.c
+// saves one byte if %1 is a register or @register
+replace {
+ mov a,%1
+ add a,acc
+} by {
+ mov a,%1
+ ; Peephole 254 optimized left shift
+ add a,%1
+} if notVolatile %1
+
+// applies to f.e. switch.c
+replace {
+ clr c
+ mov a,#%1
+ subb a,%2
+ jc %3
+%4:
+ mov a,%2
+ add a,%2
+ add a,%2
+ mov dptr,%5
+ jmp @a+dptr
+} by {
+ ; Peephole 255 optimized jump table index calculation
+ mov a,%2
+ cjne a,#(%1+0x01),.+1
+ jnc %3
+%4:
+ add a,%2
+ add a,%2
+ mov dptr,%5
+ jmp @a+dptr
+}
+
+// applies to f.e. jump tables and scott-bool1.c.
+// similar peepholes can be constructed for other instructions
+// after which a flag or a register is known (like: djnz, cjne, jnc)
+replace {
+ jc %1
+%2:
+ clr c
+} by {
+ jc %1
+%2:
+ ; Peephole 256.a removed redundant clr c
+} if labelRefCount %2 0
+
+// applies to f.e. logf.c
+replace {
+ jnz %1
+%2:
+ clr a
+} by {
+ jnz %1
+%2:
+ ; Peephole 256.b removed redundant clr a
+} if labelRefCount %2 0
+
+// applies to f.e. bug-905492.c
+replace {
+ jnz %1
+%2:
+ mov %3,#0x00
+} by {
+ jnz %1
+%2:
+ ; Peephole 256.c loading %3 with zero from a
+ mov %3,a
+} if labelRefCount %2 0
+
+// applies to f.e. malloc.c
+replace {
+ jnz %1
+%2:
+ mov %4,%5
+ mov %3,#0x00
+} by {
+ jnz %1
+%2:
+ mov %4,%5
+ ; Peephole 256.d loading %3 with zero from a
+ mov %3,a
+} if labelRefCount(%2 0),operandsNotRelated('a' %4)
+
+replace {
+ jnz %1
+%2:
+ mov %4,%5
+ mov %6,%7
+ mov %3,#0x00
+} by {
+ jnz %1
+%2:
+ mov %4,%5
+ mov %6,%7
+ ; Peephole 256.e loading %3 with zero from a
+ mov %3,a
+} if labelRefCount(%2 0),operandsNotRelated('a' %4 %6)
+
+replace {
+	jnz	%1
+%2:
+	mov	%4,%5
+	mov	%6,%7
+	mov	%8,%9
+	mov	%3,#0x00
+} by {
+	jnz	%1
+%2:
+	mov	%4,%5
+	mov	%6,%7
+	mov	%8,%9
+	; Peephole 256.f loading %3 with zero from a
+	mov	%3,a
+} if labelRefCount(%2 0),operandsNotRelated('a' %4 %6 %8)
+
+
+// in_byte<<=1; if(in_bit) in_byte|=1;
+// helps f.e. reading data on a 3-wire (SPI) bus
+replace {
+ mov a,%1
+ add a,%1
+ mov %1,a
+ jnb %2,%3
+%4:
+ orl %1,#0x01
+%3:
+} by {
+ mov a,%1
+ ; Peephole 258.a optimized bitbanging
+ mov c,%2
+ addc a,%1
+ mov %1,a
+%4:
+%3:
+} if notVolatile(%1), labelRefCountChange(%3 -1)
+
+// in_byte<<=1; if(in_bit) in_byte|=1;
+replace {
+ mov a,r%1
+ add a,r%1
+ mov r%1,a
+ jnb %2,%3
+%4:
+ orl ar%1,#0x01
+%3:
+} by {
+ mov a,r%1
+ ; Peephole 258.b optimized bitbanging
+ mov c,%2
+ addc a,r%1
+ mov r%1,a
+%4:
+%3:
+} if labelRefCountChange(%3 -1)
+
+// in_byte>>=1; if(in_bit) in_byte|=0x80;
+replace {
+ mov a,%1
+ clr c
+ rrc a
+ mov %1,a
+ jnb %2,%3
+%4:
+ orl %1,#0x80
+%3:
+} by {
+ mov a,%1
+ ; Peephole 258.c optimized bitbanging
+ mov c,%2
+ rrc a
+ mov %1,a
+%4:
+%3:
+} if notVolatile(%1), labelRefCountChange(%3 -1)
+
+// in_byte>>=1; if(in_bit) in_byte|=0x80;
+replace {
+ mov a,r%1
+ clr c
+ rrc a
+ mov r%1,a
+ jnb %2,%3
+%4:
+ orl ar%1,#0x80
+%3:
+} by {
+ mov a,r%1
+ ; Peephole 258.d optimized bitbanging
+ mov c,%2
+ rrc a
+ mov r%1,a
+%4:
+%3:
+} if labelRefCountChange(%3 -1)
+
+// out_bit=out_byte&0x80; out_byte<<=1;
+// helps f.e. writing data on a 3-wire (SPI) bus
+replace {
+ mov a,%1
+ rlc a
+ mov %2,c
+ mov a,%1
+ add a,%1
+ mov %1,a
+} by {
+ mov a,%1
+ ; Peephole 258.e optimized bitbanging
+ add a,%1
+ mov %2,c
+ mov %1,a
+} if notVolatile %1
+
+// out_bit=out_byte&0x01; out_byte>>=1;
+replace {
+ mov a,%1
+ rrc a
+ mov %2,c
+ mov a,%1
+ clr c
+ rrc a
+ mov %1,a
+} by {
+ mov a,%1
+ ; Peephole 258.f optimized bitbanging
+ clr c
+ rrc a
+ mov %2,c
+ mov %1,a
+} if notVolatile %1
+
+// Peepholes 259.x rely on the correct labelRefCount. Otherwise they are
+// not compatible with peepholes 250.x
+// Peepholes 250.x add jumps to a previously unused label. If the
+// labelRefCount is not increased, peepholes 259.x are (mistakenly) applied.
+// (Mail on sdcc-devel 2004-10-25)
+//
+// applies to f.e. vprintf.c
+replace {
+ sjmp %1
+%2:
+ ret
+} by {
+ sjmp %1
+ ; Peephole 259.a removed redundant label %2 and ret
+ ;
+} if labelRefCount %2 0
+
+// applies to f.e. gets.c
+replace {
+ ljmp %1
+%2:
+ ret
+} by {
+ ljmp %1
+ ; Peephole 259.b removed redundant label %2 and ret
+ ;
+} if labelRefCount %2 0
+
+// optimizing jumptables
+// Please note: to enable peephole 260.x you currently have to set
+// the environment variable SDCC_SJMP_JUMPTABLE
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+%3:
+} by {
+ ; Peephole 260.a used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+%3:
+} by {
+ ; Peephole 260.b used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+%3:
+} by {
+ ; Peephole 260.c used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+%3:
+} by {
+ ; Peephole 260.d used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+%3:
+} by {
+ ; Peephole 260.e used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+%3:
+} by {
+ ; Peephole 260.f used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+%3:
+} by {
+ ; Peephole 260.g used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+%3:
+} by {
+ ; Peephole 260.h used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+ ljmp %16
+%3:
+} by {
+ ; Peephole 260.i used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+ sjmp %16
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+ ljmp %16
+ ljmp %17
+%3:
+} by {
+ ; Peephole 260.j used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+ sjmp %16
+ sjmp %17
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+ ljmp %16
+ ljmp %17
+ ljmp %18
+%3:
+} by {
+ ; Peephole 260.k used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+ sjmp %16
+ sjmp %17
+ sjmp %18
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+ ljmp %16
+ ljmp %17
+ ljmp %18
+ ljmp %19
+%3:
+} by {
+ ; Peephole 260.l used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+ sjmp %16
+ sjmp %17
+ sjmp %18
+ sjmp %19
+%3:
+} if labelJTInRange
+
+// optimizing jumptables
+replace {
+ add a,%1
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ ljmp %5
+ ljmp %6
+ ljmp %7
+ ljmp %8
+ ljmp %9
+ ljmp %10
+ ljmp %11
+ ljmp %12
+
+ ljmp %13
+ ljmp %14
+ ljmp %15
+ ljmp %16
+ ljmp %17
+ ljmp %18
+ ljmp %19
+ ljmp %20
+%3:
+} by {
+ ; Peephole 260.m used sjmp in jumptable
+ mov dptr,#%2
+ jmp @a+dptr
+%2:
+ sjmp %5
+ sjmp %6
+ sjmp %7
+ sjmp %8
+ sjmp %9
+ sjmp %10
+ sjmp %11
+ sjmp %12
+
+ sjmp %13
+ sjmp %14
+ sjmp %15
+ sjmp %16
+ sjmp %17
+ sjmp %18
+ sjmp %19
+ sjmp %20
+%3:
+} if labelJTInRange
+
+// applies to: a = (a << 1) | (a >> 15);
+replace {
+ mov a,%1
+ rlc a
+ mov %1,a
+ mov a,%2
+ rlc a
+ mov %2,a
+ mov a,%1
+ mov acc.0,c
+ mov %1,a
+} by {
+ mov a,%1
+ rlc a
+ ; Peephole 261.a optimized left rol
+ xch a,%2
+ rlc a
+ xch a,%2
+ mov acc.0,c
+ mov %1,a
+}
+
+// applies to: a = (a << 15) | (a >> 1);
+replace {
+ mov a,%1
+ rrc a
+ mov %1,a
+ mov a,%2
+ rrc a
+ mov %2,a
+ mov a,%1
+ mov acc.7,c
+ mov %1,a
+} by {
+ mov a,%1
+ rrc a
+ ; Peephole 261.b optimized right rol
+ xch a,%2
+ rrc a
+ xch a,%2
+ mov acc.7,c
+ mov %1,a
+}
+
+// two consecutive complements of the carry flag cancel out
+replace {
+	cpl	c
+	cpl	c
+} by {
+	; Peephole 262 removed redundant cpl c
+}
+
+replace {
+ mov %1,#%2
+ inc %1
+ inc %1
+ inc %1
+} by {
+ ; Peephole 263.a optimized loading const
+ mov %1,#(%2 + 3)
+} if notVolatile(%1)
+
+replace {
+ mov %1,#%2
+ inc %1
+ inc %1
+} by {
+ ; Peephole 263.b optimized loading const
+ mov %1,#(%2 + 2)
+} if notVolatile(%1)
+
+replace {
+ mov %1,#%2
+ inc %1
+} by {
+ ; Peephole 263.c optimized loading const
+ mov %1,#(%2 + 1)
+} if notVolatile(%1)
+
+
+// acc is built as a 0/1 equality flag (1 when %1 == %2, 0 otherwise)
+// solely so the jz can test it; branch on the cjne directly instead.
+// "(acc not set)" flags that acc holds a different value afterwards —
+// assumes acc is dead past the jz; TODO confirm with the code generator.
+replace {
+	clr	a
+	cjne	%1,%2,%3
+	inc	a
+%3:
+	jz	%4
+} by {
+	; Peephole 264 jump optimization (acc not set)
+	cjne	%1,%2,%4
+%3:
+} if labelRefCount(%3 1), labelRefCountChange(%3 -1)
+
+
+replace {
+ mov %1,c
+ cpl %1
+} by {
+ ; Peephole 265 optimized mov/cpl sequence (carry differs)
+ cpl c
+ mov %1,c
+} if notVolatile(%1)
+
+// bit %1 was just written from carry, so jb %1 is equivalent to jc
+replace {
+	mov	%1,c
+	jb	%1,%2
+} by {
+	; Peephole 266.a optimized mov/jump sequence
+	mov	%1,c
+	jc	%2
+} if notVolatile(%1)
+
+// bit %1 was just written from carry, so jnb %1 is equivalent to jnc
+replace {
+	mov	%1,c
+	jnb	%1,%2
+} by {
+	; Peephole 266.b optimized mov/jump sequence
+	mov	%1,c
+	jnc	%2
+} if notVolatile(%1)
+
+replace {
+ jnc %1
+ setb %2
+ sjmp %3
+%1:
+ clr %2
+%3:
+} by {
+ ; Peephole 267.a optimized mov bit sequence
+ mov %2,c
+%1:
+%3:
+} if labelRefCount(%1 1), labelRefCountChange(%1 -1), labelRefCountChange(%3 -1)
+
+replace {
+ jc %1
+ clr %2
+ sjmp %3
+%1:
+ setb %2
+%3:
+} by {
+ ; Peephole 267.b optimized mov bit sequence
+ mov %2,c
+%1:
+%3:
+} if labelRefCount(%1 1), labelRefCountChange(%1 -1), labelRefCountChange(%3 -1)
+
+// the second identical store of carry into bit %1 is redundant
+replace {
+	mov	%1,c
+	mov	%1,c
+} by {
+	; Peephole 268 removed redundant mov
+	mov	%1,c
+} if notVolatile(%1)
+
+// after mov %1,c the carry already equals bit %1, so reloading carry
+// from %1 is redundant
+replace {
+	mov	%1,c
+	mov	c,%1
+} by {
+	; Peephole 269 removed redundant mov
+	mov	%1,c
+} if notVolatile(%1)
+
+//accessing struct/array on stack
+replace {
+ add a,#%1
+ add a,#%2
+} by {
+ ; Peephole 270 removed redundant add
+ add a,#%1+%2
+}
+
+replace {
+ jz %1
+ mov %2,%4
+ sjmp %3
+%1:
+ mov %2,#0x00
+%3:
+} by {
+ jz %1
+ ; Peephole 271 optimized ternary operation (acc different)
+ mov a,%4
+%1:
+ mov %2,a
+%3:
+} if operandsNotRelated('a' 'dptr' %2), labelRefCount(%1 1), labelRefCountChange(%3 -1)
+
+
+// drop a pop whose destination register is never read again (deadMove).
+// NOTE(review): removing the pop also removes its implicit sp adjustment —
+// presumably deadMove/restart only fires where the matching push is
+// eliminated as well; confirm against SDCCpeeph deadMove semantics.
+replace restart {
+	pop	ar%1
+} by {
+	; Peephole 300	pop ar%1 removed
+} if deadMove(%1)
+
+// drop a register load whose destination is never read again (deadMove);
+// notVolatile(%2) is required because the read of %2 disappears entirely
+replace {
+	mov	r%1,%2
+} by {
+	; Peephole 301	mov r%1,%2 removed
+} if notVolatile(%2), deadMove(%1)
+
+
+// applies to: void test( char c ) { if( c ) func1(); else func2(); }
+replace {
+ lcall %1
+ ret
+} by {
+ ; Peephole 400.a replaced lcall/ret with ljmp
+ ljmp %1
+}
+
+// applies to: void test( char c ) { if( c ) func1(); else func2(); }
+replace {
+ lcall %1
+%2:
+ ret
+} by {
+ ; Peephole 400.b replaced lcall/ret with ljmp
+ ljmp %1
+ ;
+} if labelRefCount %2 0
+
+// applies to f.e. scott-bool1.c
+replace {
+ lcall %1
+%2:
+ ret
+} by {
+ ; Peephole 400.c replaced lcall with ljmp
+ ljmp %1
+%2:
+ ret
+}
+
+// for programs less than 2k
+replace {
+ lcall %1
+} by {
+ ; Peephole 400.d replaced lcall with acall
+ acall %1
+} if useAcallAjmp
+
+// for programs less than 2k
+replace {
+ ljmp %1
+} by {
+ ; Peephole 400.e replaced ljmp with ajmp
+ ajmp %1
+} if useAcallAjmp
+
+
+// should be one of the last peepholes
+replace{
+%1:
+} by {
+ ; Peephole 500 removed redundant label %1
+} if labelRefCount(%1 0)