+
+// applies to: a = (a << 1) | (a >> 15);
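+// the xch-based form below rotates %2 in place, avoiding the reload of %1 and saving two instructions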
+replace {
+ mov a,%1
+ rlc a
+ mov %1,a
+ mov a,%2
+ rlc a
+ mov %2,a
+ mov a,%1
+ mov acc.0,c
+ mov %1,a
+} by {
+ mov a,%1
+ rlc a
+ ; Peephole 261.a optimized left rotation
+ xch a,%2
+ rlc a
+ xch a,%2
+ mov acc.0,c
+ mov %1,a
+}
+
+// applies to: a = (a << 15) | (a >> 1);
+replace {
+ mov a,%1
+ rrc a
+ mov %1,a
+ mov a,%2
+ rrc a
+ mov %2,a
+ mov a,%1
+ mov acc.7,c
+ mov %1,a
+} by {
+ mov a,%1
+ rrc a
+ ; Peephole 261.b optimized right rotation
+ xch a,%2
+ rrc a
+ xch a,%2
+ mov acc.7,c
+ mov %1,a
+}
+
+replace {
+ cpl c
+ cpl c
+} by {
+ ; Peephole 262 removed redundant cpl c
+}
+
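+// The next three rules fold a chain of inc's after an immediate load into one
+// adjusted constant; notVolatile(%1) is needed because the number of writes to %1 changes.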
+replace {
+ mov %1,#%2
+ inc %1
+ inc %1
+ inc %1
+} by {
+ ; Peephole 263.a optimized loading const
+ mov %1,#(%2 + 3)
+} if notVolatile(%1)
+
+replace {
+ mov %1,#%2
+ inc %1
+ inc %1
+} by {
+ ; Peephole 263.b optimized loading const
+ mov %1,#(%2 + 2)
+} if notVolatile(%1)
+
+replace {
+ mov %1,#%2
+ inc %1
+} by {
+ ; Peephole 263.c optimized loading const
+ mov %1,#(%2 + 1)
+} if notVolatile(%1)
+
+
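+// Folds the clr/cjne/inc flag computation plus the jz into a single cjne.
+// Note: the replacement no longer leaves the 0/1 result in acc, hence "(acc not set)".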
+replace {
+ clr a
+ cjne %1,%2,%3
+ inc a
+%3:
+ jz %4
+} by {
+ ; Peephole 264 jump optimization (acc not set)
+ cjne %1,%2,%4
+%3:
+} if labelRefCount(%3 1), labelRefCountChange(%3 -1)
+
+
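+// Complementing the carry first avoids the read-modify-write cpl on %1;
+// the carry leaves this sequence inverted, hence "(carry differs)".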
+replace {
+ mov %1,c
+ cpl %1
+} by {
+ ; Peephole 265 optimized mov/cpl sequence (carry differs)
+ cpl c
+ mov %1,c
+} if notVolatile(%1)
+
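+// In the next two rules %1 has just been loaded from the carry, so the
+// bit test can use jc/jnc directly instead of re-reading %1.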
+replace {
+ mov %1,c
+ jb %1,%2
+} by {
+ ; Peephole 266.a optimized mov/jump sequence
+ mov %1,c
+ jc %2
+} if notVolatile(%1)
+
+replace {
+ mov %1,c
+ jnb %1,%2
+} by {
+ ; Peephole 266.b optimized mov/jump sequence
+ mov %1,c
+ jnc %2
+} if notVolatile(%1)
+
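+// The next two rules reduce a conditional setb/clr of a bit from the carry to a
+// plain mov %2,c; the labels are kept since %3 may still be referenced elsewhere.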
+replace {
+ jnc %1
+ setb %2
+ sjmp %3
+%1:
+ clr %2
+%3:
+} by {
+ ; Peephole 267.a optimized mov bit sequence
+ mov %2,c
+%1:
+%3:
+} if labelRefCount(%1 1), labelRefCountChange(%1 -1), labelRefCountChange(%3 -1)
+
+replace {
+ jc %1
+ clr %2
+ sjmp %3
+%1:
+ setb %2
+%3:
+} by {
+ ; Peephole 267.b optimized mov bit sequence
+ mov %2,c
+%1:
+%3:
+} if labelRefCount(%1 1), labelRefCountChange(%1 -1), labelRefCountChange(%3 -1)
+
+replace {
+ mov %1,c
+ mov %1,c
+} by {
+ ; Peephole 268 removed redundant mov
+ mov %1,c
+} if notVolatile(%1)
+
+replace {
+ mov %1,c
+ mov c,%1
+} by {
+ ; Peephole 269 removed redundant mov
+ mov %1,c
+} if notVolatile(%1)
+
+// accessing struct/array on stack
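+// note: a single add of the combined constant may leave CY/AC set differently
+// than the two separate adds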
+replace {
+ add a,#%1
+ add a,#%2
+} by {
+ ; Peephole 270 combined immediate adds
+ add a,#%1+%2
+}
+
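+// Merges the two stores of a ternary such as %2 = acc ? %4 : 0 by routing both
+// values through acc, so acc holds the stored value afterwards ("acc different").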
+replace {
+ jz %1
+ mov %2,%4
+ sjmp %3
+%1:
+ mov %2,#0x00
+%3:
+} by {
+ jz %1
+ ; Peephole 271 optimized ternary operation (acc different)
+ mov a,%4
+%1:
+ mov %2,a
+%3:
+} if operandsNotRelated('a' 'dptr' %2), labelRefCount(%1 1), labelRefCountChange(%3 -1)
+
+
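+// The next two rules drop a pop/move whose destination register deadMove
+// reports as no longer used.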
+replace restart {
+ pop ar%1
+} by {
+ ; Peephole 300 pop ar%1 removed
+} if deadMove(%1)
+
+replace {
+ mov r%1,%2
+} by {
+ ; Peephole 301 mov r%1,%2 removed
+} if notVolatile(%2), deadMove(%1)
+
+
+// applies to: void test( char c ) { if( c ) func1(); else func2(); }
+replace {
+ lcall %1
+ ret
+} by {
+ ; Peephole 400.a replaced lcall/ret with ljmp
+ ljmp %1
+}
+
+// applies to: void test( char c ) { if( c ) func1(); else func2(); }
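+// 400.b drops the label and ret once the label is unreferenced; 400.c below
+// keeps them for the case where %2 is still referenced.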
+replace {
+ lcall %1
+%2:
+ ret
+} by {
+ ; Peephole 400.b replaced lcall/ret with ljmp
+ ljmp %1
+ ;
+} if labelRefCount(%2 0)
+
+// applies to f.e. scott-bool1.c
+replace {
+ lcall %1
+%2:
+ ret
+} by {
+ ; Peephole 400.c replaced lcall with ljmp
+ ljmp %1
+%2:
+ ret
+}
+
+// for programs that fit in a single 2k page (acall/ajmp use an 11-bit target address)
+replace {
+ lcall %1
+} by {
+ ; Peephole 400.d replaced lcall with acall
+ acall %1
+} if useAcallAjmp
+
+// for programs that fit in a single 2k page (acall/ajmp use an 11-bit target address)
+replace {
+ ljmp %1
+} by {
+ ; Peephole 400.e replaced ljmp with ajmp
+ ajmp %1
+} if useAcallAjmp
+
+
+// should be one of the last peepholes
+replace {
+%1:
+} by {
+ ; Peephole 500 removed redundant label %1
+} if labelRefCount(%1 0)