-//replace restart {
-// pop %1
-// push %1
-//} by {
-// ; Peephole 1 removed pop %1 push %1 (not push pop)
-//}
-
-//replace restart {
-// pop %1
-// mov %2,%3
-// push %1
-//} by {
-// ; Peephole 2 removed pop %1 push %1 (not push pop)
-// mov %2,%3
-//}
-
-//
// added by Jean Louis VERN for
// his shift stuff
replace {
mov %1,a
}
-replace {
+replace restart {
// saving 1 byte, loosing 1 cycle but maybe allowing peephole 3.b to start
mov %1,#0x00
mov %2,#0x00
movx @dptr,a
} if notVolatile %1
+// applies to f.e. lib/src/time.c (--model-large)
+// collapses four identical loads: movx @dptr,a and inc dptr leave acc
+// unchanged, so after the first mov a,%1 the accumulator already holds
+// the value; notVolatile(%1) guarantees %1 cannot change between reads.
+// NOTE(review): assumes %1 is never dpl/dph (those change with inc dptr)
+// - presumably guaranteed by the code generator; confirm.
+replace {
+ mov a,%1
+ movx @dptr,a
+ inc dptr
+ mov a,%1
+ movx @dptr,a
+ inc dptr
+ mov a,%1
+ movx @dptr,a
+ inc dptr
+ mov a,%1
+ movx @dptr,a
+} by {
+ mov a,%1
+ movx @dptr,a
+ inc dptr
+ ; Peephole 101.a removed redundant moves
+ movx @dptr,a
+ inc dptr
+ movx @dptr,a
+ inc dptr
+ movx @dptr,a
+} if notVolatile %1
+
+// applies to f.e. support/regression/tests/literalop.c (--model-large)
+// 3-byte variant of 101.a: acc survives movx/inc dptr, so the two later
+// reloads of %1 are redundant when %1 is notVolatile
+replace {
+ mov a,%1
+ movx @dptr,a
+ inc dptr
+ mov a,%1
+ movx @dptr,a
+ inc dptr
+ mov a,%1
+ movx @dptr,a
+} by {
+ mov a,%1
+ movx @dptr,a
+ inc dptr
+ ; Peephole 101.b removed redundant moves
+ movx @dptr,a
+ inc dptr
+ movx @dptr,a
+} if notVolatile %1
+
+// applies to f.e. support/regression/tests/onebyte.c (--model-large)
replace {
mov a,%1
movx @dptr,a
mov a,%1
movx @dptr,a
inc dptr
- ; Peephole 101 removed redundant mov
+ ; Peephole 101.c removed redundant mov
movx @dptr,a
} if notVolatile %1
%3:
mov dpl,%1
%7:
- mov sp,bp
- pop bp
+ mov sp,_bp
+ pop _bp
} by {
- ; Peephole 102 removed redundant mov
+ ; Peephole 102 removed redundant mov to %1
mov dpl,%2
ljmp %3
%4:
mov dpl,%5
%3:
%7:
- mov sp,bp
- pop bp
-} if notVolatile %1
+ mov sp,_bp
+ pop _bp
+} if notVolatile(%1), labelRefCount(%3 1)
replace {
mov %1,%2
%3:
mov dpl,%1
%7:
- mov sp,bp
- pop bp
+ mov sp,_bp
+ pop _bp
} by {
- ; Peephole 103 removed redundant mov
+ ; Peephole 103 removed redundant mov to %1
mov dpl,%2
ljmp %3
%4:
mov dpl,%5
%3:
%7:
- mov sp,bp
- pop bp
-}
+ mov sp,_bp
+ pop _bp
+} if labelRefCount(%3 1)
-replace {
- mov a,bp
- clr c
- add a,#0x01
- mov r%1,a
-} by {
- ; Peephole 104 optimized increment (acc not set to r%1, flags undefined)
- mov r%1,bp
- inc r%1
-}
+// Does not seem to be triggered anymore
+//replace {
+// mov a,_bp
+// clr c
+// add a,#0x01
+// mov r%1,a
+//} by {
+// ; Peephole 104 optimized increment (acc not set to r%1, flags undefined)
+// mov r%1,_bp
+// inc r%1
+//}
replace {
mov %1,a
mov a,%1
} by {
mov %1,a
-; Peephole 105 removed redundant mov
+ ; Peephole 105 removed redundant mov
} if notVolatile %1
replace {
} by {
; Peephole 107 removed redundant ljmp
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
replace {
jc %1
; Peephole 115.b jump optimization
jz %3
%2:
-} labelRefCountChange(%2 -1)
+} if labelRefCountChange(%2 -1)
replace {
clr a
replace {
push psw
mov psw,%1
- push bp
- mov bp,%2
+ push _bp
+ mov _bp,%2
%3:
- mov %2,bp
- pop bp
+ mov %2,_bp
+ pop _bp
pop psw
ret
} by {
mov a,%2
add a,#0xff - %1
mov %3,c
-}
+} if operandsLiteral(%1)
replace {
clr c
mov a,%2
add a,#0xff - %1
jnc %5
-}
+} if operandsLiteral(%1)
replace {
clr c
mov a,%2
add a,#0xff - %1
jc %5
-}
+} if operandsLiteral(%1)
-replace {
- clr c
- mov a,%1
- subb a,#%2
- mov %3,c
-} by {
- ; Peephole 132.d optimized genCmpGt by inverse logic
- mov a,#0x100 - %2
- add a,%1
- mov %3,c
-} if operandsNotRelated('0x00' %2)
+// disabled. See bug1734654.c
+//replace {
+// clr c
+// mov a,%1
+// subb a,#%2
+// mov %3,c
+//} by {
+// ; Peephole 132.d optimized genCmpGt by inverse logic
+// mov a,#0x100 - %2
+// add a,%1
+// mov %3,c
+//} if operandsNotRelated('0x00' %2), operandsLiteral(%2)
replace {
clr c
mov a,#0x100 - %2
add a,%1
jc %5
-} if operandsNotRelated('0x00' %2)
+} if operandsNotRelated('0x00' %2), operandsLiteral(%2)
replace {
clr c
mov a,#0x100 - %2
add a,%1
jnc %5
-} if operandsNotRelated('0x00' %2)
+} if operandsNotRelated('0x00' %2), operandsLiteral(%2)
replace {
mov %1,a
mov dpl,%2
mov dph,%3
- ; Peephole 136 removed redundant move
+ ; Peephole 136 removed redundant mov
} if notVolatile %1
// WTF? Doesn't look sensible to me...
} by {
; Peephole 139.b removed redundant mov
orl a,%2
- mov r%1,a }
+ mov r%1,a
+}
replace {
mov r%1,a
mov r%1,a
}
+// applies to genlshOne
+// a and r%1 both held the value loaded from @%2, so a+acc equals
+// a+@%2 as long as the pointed-to byte is unchanged in between.
+// NOTE(review): the replacement reads @%2 twice where the original read
+// it once - notVolatile cannot inspect an indirect operand, so confirm
+// volatile data is never accessed through @%2 at this point.
+replace {
+ mov ar%1,@%2
+ mov a,r%1
+ add a,acc
+ mov r%1,a
+} by {
+ ; Peephole 140 removed redundant mov
+ mov a,@%2
+ add a,@%2
+ mov r%1,a
+}
+
replace {
mov r%1,a
mov r%2,ar%1
mov ar%1,@r%2
} by {
- ; Peephole 142 removed redundant moves
+ ; Peephole 142 removed redundant mov
mov r%2,a
mov ar%1,@r%2
}
; Peephole 160.a removed sjmp by inverse jump logic
jc %2
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
replace {
jc %1
; Peephole 160.b removed sjmp by inverse jump logic
jnc %2
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
replace {
jnz %1
; Peephole 160.c removed sjmp by inverse jump logic
jz %2
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
replace {
jz %1
; Peephole 160.d removed sjmp by inverse jump logic
jnz %2
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
replace {
jnb %3,%1
; Peephole 160.e removed sjmp by inverse jump logic
jb %3,%2
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
replace {
jb %3,%1
; Peephole 160.f removed sjmp by inverse jump logic
jnb %3,%2
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
replace {
mov %1,%2
; Peephole 168 jump optimization
jb %1,%3
%2:
-} labelRefCountChange(%2 -1)
+} if labelRefCountChange(%2 -1)
replace {
jb %1,%2
; Peephole 169 jump optimization
jnb %1,%3
%2:
-} labelRefCountChange(%2 -1)
+} if labelRefCountChange(%2 -1)
replace {
clr a
mov %1,%2
mov %1,%3
} by {
- ; Peephole 177.c removed redundant move
+ ; Peephole 177.c removed redundant mov
mov %1,%3
-} if notVolatile %1 %2
+} if notVolatile(%1 %2),operandsNotRelated(%1 %3)
// applies to f.e. bug-408972.c
// not before peephole 177.c
} by {
mov %1,%2
mov %3,%4
- ; Peephole 177.d removed redundant move
+ ; Peephole 177.d removed redundant mov
} if notVolatile(%1 %2),operandsNotRelated(%1 %2 %3)
// applies to f.e. bug-607243.c
-// also check notVolatile %3, as it will return FALSE if it's @r%1
replace {
- mov r%1,%2
- mov ar%1,%3
+ mov %1,%2
+ mov a%1,%3
} by {
- ; peephole 177.e removed redundant move
- mov ar%1,%3
-} if notVolatile %2 %3
+ ; peephole 177.e removed redundant mov %1,%2
+ mov a%1,%3
+} if notVolatile(%2), operandsNotRelated(%1 %3)
replace {
mov ar%1,%2
mov r%1,%3
} by {
- ; peephole 177.f removed redundant move
+ ; peephole 177.f removed redundant mov
mov r%1,%3
} if notVolatile %2
mov %1,a
} if notVolatile %2
+// applies to f.e. testfwk.c
+// r%1 was just loaded from acc, so ar%2 can be copied from acc directly:
+// mov r%2,a (1 byte) instead of mov ar%2,r%1 (2 bytes)
+replace {
+ mov r%1,a
+ mov ar%2,r%1
+} by {
+ mov r%1,a
+ ; peephole 177.i optimized mov sequence
+ mov r%2,a
+}
+
+// r%1 is only a stepping stone here (it is overwritten by %4 right
+// after), so load r%3 straight from %2.
+// NOTE(review): assumes %4 does not read r%1 (e.g. is not ar%1) -
+// presumably the code generator never emits that; confirm.
+replace {
+ mov r%1,%2
+ mov ar%3,r%1
+ mov r%1,%4
+} by {
+ ; peephole 177.j optimized mov sequence
+ mov r%3,%2
+ mov r%1,%4
+}
+
replace {
mov a,%1
mov b,a
mov %1,a
} if notVolatile %1
-replace {
-// acc being incremented might cause problems
- mov %1,a
- inc %1
-} by {
- ; Peephole 185 changed order of increment (acc incremented also!)
- inc a
- mov %1,a
-} if notVolatile %1
+//replace {
+// acc being incremented might cause problems with register tracking
+// mov %1,a
+// inc %1
+//} by {
+// ; Peephole 185 changed order of increment (acc incremented also!)
+// inc a
+// mov %1,a
+//} if notVolatile %1
-replace {
+// char indexed access to: long code table[] = {4,3,2,1};
+replace restart {
add a,#%1
mov dpl,a
clr a
mov %4,a
inc dptr
clr a
+ movc a,@a+dptr
} by {
; Peephole 186.a optimized movc sequence
+ mov b,a
mov dptr,#%1
- mov b,acc
movc a,@a+dptr
mov %2,a
- mov acc,b
inc dptr
+ mov a,b
movc a,@a+dptr
mov %3,a
- mov acc,b
inc dptr
+ mov a,b
movc a,@a+dptr
mov %4,a
- mov acc,b
inc dptr
+ mov a,b
+ movc a,@a+dptr
}
-replace {
+// char indexed access to: void* code table[] = {4,3,2,1};
+replace restart {
add a,#%1
mov dpl,a
clr a
mov %3,a
inc dptr
clr a
+ movc a,@a+dptr
} by {
; Peephole 186.b optimized movc sequence
+ mov b,a
mov dptr,#%1
- mov b,acc
movc a,@a+dptr
mov %2,a
- mov acc,b
inc dptr
+ mov a,b
movc a,@a+dptr
mov %3,a
- mov acc,b
inc dptr
+ mov a,b
+ movc a,@a+dptr
}
-replace {
+// char indexed access to: int code table[] = {4,3,2,1};
+replace restart {
add a,#%1
mov dpl,a
clr a
mov %2,a
inc dptr
clr a
+ movc a,@a+dptr
} by {
; Peephole 186.c optimized movc sequence
+ mov %2,a
mov dptr,#%1
- mov b,acc
movc a,@a+dptr
- mov %2,a
- mov acc,b
+ xch a,%2
inc dptr
+ movc a,@a+dptr
}
// char indexed access to: char code table[] = {4,3,2,1};
; Peephole 191 removed redundant mov
} if notVolatile %1
+// applies to f.e. regression/ports/mcs51/support.c
replace {
mov r%1,a
mov @r%2,ar%1
} by {
mov r%1,a
- ; Peephole 192 used a instead of ar%1 as source
+ ; Peephole 192.a used a instead of ar%1 as source
mov @r%2,a
}
+// applies to f.e. printf_large.c
+// routing through acc saves a byte: mov a,@r%2 + mov r%1,a (1+1 bytes)
+// instead of mov ar%1,@r%2 + mov a,r%1 (2+1 bytes); acc and r%1 end up
+// with the same value either way
+replace {
+ mov ar%1,@r%2
+ mov a,r%1
+} by {
+ ; Peephole 192.b used a instead of ar%1 as destination
+ mov a,@r%2
+ mov r%1,a
+}
+
replace {
jnz %3
mov a,%4
cjne %13,%14,%8
sjmp %7
%3:
-} if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
+} if labelInRange(%8), labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
replace {
cjne %1,%2,%3
cjne %13,%14,%8
sjmp %7
%3:
-} if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
+} if labelInRange(%8), labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
replace {
cjne @%1,%2,%3
cjne @%1,%14,%8
sjmp %7
%3:
-} if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
+} if labelInRange(%8), labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
replace {
cjne %1,%2,%3
cjne %13,%14,%8
sjmp %7
%3:
-} if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
+} if labelInRange(%8), labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
replace {
jnz %3
cjne %10,%11,%8
sjmp %7
%3:
-} if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
+} if labelInRange(%8), labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
replace {
cjne %1,%2,%3
cjne %10,%11,%8
sjmp %7
%3:
-} if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
+} if labelInRange(%8), labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
replace {
cjne @%1,%2,%3
cjne @%1,%11,%8
sjmp %7
%3:
-} if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
+} if labelInRange(%8), labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
replace {
cjne %1,%2,%3
cjne %10,%11,%8
sjmp %7
%3:
-} if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
+} if labelInRange(%8), labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
replace {
jnz %3
cjne %5,%6,%8
sjmp %7
%3:
-} if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
+} if labelInRange(%8), labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
replace {
cjne %1,%2,%3
cjne %5,%6,%8
sjmp %7
%3:
-} if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
+} if labelInRange(%8), labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
replace {
cjne @%1,%2,%3
cjne @%1,%6,%8
sjmp %7
%3:
-} if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
+} if labelInRange(%8), labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
replace {
cjne %1,%2,%3
cjne %5,%6,%8
sjmp %7
%3:
-} if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
+} if labelInRange(%8), labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
replace {
cjne %1,%2,%3
cjne %1,%2,%5
sjmp %4
%3:
-} if labelRefCount(%3 1), labelRefCountChange(%3 -1)
+} if labelInRange(%5), labelRefCount(%3 1), labelRefCountChange(%3 -1)
replace {
sjmp %1
} by {
; Peephole 200.a removed redundant sjmp
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
replace {
sjmp %1
; Peephole 200.b removed redundant sjmp
%2:
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
replace {
push acc
mov %1,%1
} by {
; Peephole 206 removed redundant mov %1,%1
-} if notVolatile
+} if notVolatile %1
-replace {
- mov a,_bp
- add a,#0x00
- mov %1,a
-} by {
- ; Peephole 207 removed zero add (acc not set to %1, flags undefined)
- mov %1,_bp
-}
+// Does not seem to be triggered anymore
+//replace {
+// mov a,_bp
+// add a,#0x00
+// mov %1,a
+//} by {
+// ; Peephole 207 removed zero add (acc not set to %1, flags undefined)
+// mov %1,_bp
+//}
replace {
push acc
mov r%1,_bp
}
-replace {
- mov a,_bp
- add a,#0x00
- inc a
- mov %1,a
-} by {
- ; Peephole 209 optimized increment (acc not set to %1, flags undefined)
- mov %1,_bp
- inc %1
-}
+// Does not seem to be triggered anymore
+//replace {
+// mov a,_bp
+// add a,#0x00
+// inc a
+// mov %1,a
+//} by {
+// ; Peephole 209 optimized increment (acc not set to %1, flags undefined)
+// mov %1,_bp
+// inc %1
+//}
replace {
mov dptr,#((((%1 >> 8)) <<8) + %1)
; Peephole 211 removed redundant push %1 pop %1
}
-replace {
- mov a,_bp
- add a,#0x01
- mov r%1,a
-} by {
- ; Peephole 212 reduced add sequence to inc
- mov r%1,_bp
- inc r%1
-}
+// Does not seem to be triggered anymore
+//replace {
+// mov a,_bp
+// add a,#0x01
+// mov r%1,a
+//} by {
+// ; Peephole 212 reduced add sequence to inc
+// mov r%1,_bp
+// inc r%1
+//}
// reverts peephole 159? asx8051 cannot handle, too complex?
replace {
add a,%1
} by {
mov %1,a
- ; Peephole 214 reduced some extra moves
+ ; Peephole 214.a removed redundant mov
add a,%2
} if operandsNotSame
add a,%2
mov %1,a
} by {
- ; Peephole 215 removed some moves
+ ; Peephole 214.b removed redundant mov
add a,%2
mov %1,a
} if operandsNotSame
replace {
mov %1 + %2,(%2 + %1)
} by {
- ; Peephole 221.a remove redundant move
+ ; Peephole 221.a remove redundant mov
} if notVolatile
replace {
mov (%1 + %2 + %3),((%2 + %1) + %3)
} by {
- ; Peephole 221.b remove redundant move
+ ; Peephole 221.b remove redundant mov
} if notVolatile
replace {
; Peephole 225 removed redundant move to acc
} if notVolatile %1
+// acc still holds 0 after movx/inc %1, so the second clr a is redundant
+replace {
+ clr a
+ movx @%1,a
+ inc %1
+ clr a
+} by {
+ clr a
+ movx @%1,a
+ inc %1
+ ; Peephole 226.a removed unnecessary clr
+}
+
replace {
clr a
movx @%1,a
inc %1
movx @%1,a
inc %1
- ; Peephole 226 removed unnecessary clr
+ ; Peephole 226.b removed unnecessary clr
}
replace {
mov %3,%4
%1:
ret
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
replace {
sjmp %1
mov dph,%6
%1:
ret
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
// applies to f.e. device/lib/log10f.c
replace {
mov %7,%15
mov %8,%16
; Peephole 238.a removed 4 redundant moves
-} if operandsNotSame8 %1 %2 %3 %4 %5 %6 %7 %8
+} if operandsNotSame8(%1 %2 %3 %4 %5 %6 %7 %8), notVolatile(%1 %2 %3 %4 %9 %10 %11 %12)
// applies to device/lib/log10f.c
replace {
mov %3,%7
mov %4,%8
; Peephole 238.b removed 3 redundant moves
-} if operandsNotSame7 %1 %2 %3 %4 %5 %6 %7
+} if operandsNotSame7(%1 %2 %3 %4 %5 %6 %7), notVolatile(%1 %2 %3 %5 %6 %7)
// applies to f.e. device/lib/time.c
replace {
mov %3,%7
mov %4,%8
; Peephole 238.c removed 2 redundant moves
-} if operandsNotSame4 %1 %2 %3 %4
+} if operandsNotSame4(%1 %2 %3 %4), notVolatile(%1 %2 %5 %6)
// applies to f.e. support/regression/tests/bug-524209.c
replace {
mov %2,%5
mov %3,%6
; Peephole 238.d removed 3 redundant moves
-} if operandsNotSame6 %1 %2 %3 %4 %5 %6
+} if operandsNotSame6(%1 %2 %3 %4 %5 %6), notVolatile(%1 %2 %3 %4 %5 %6)
// applies to f.e. ser_ir.asm
replace {
inc a
%0:
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
// applies to generic pointer compare
replace {
inc a
%0:
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
// applies to f.e. time.c
replace {
inc a
%0:
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
// applies to f.e. malloc.c
replace {
inc a
%0:
%1:
-} labelRefCountChange(%1 -1)
+} if labelRefCountChange(%1 -1)
// applies to f.e. j = (k!=0x1000);
// with volatile idata long k;
inc a
%1:
%2:
-} labelRefCountChange(%2 -1)
+} if labelRefCountChange(%2 -1)
// applies to f.e. j = (p!=NULL);
// with volatile idata char *p;
inc a
%1:
%2:
-} labelRefCountChange(%2 -1)
+} if labelRefCountChange(%2 -1)
// applies to f.e. j = (k!=0x1000);
// with volatile idata int k;
inc a
%1:
%2:
-} labelRefCountChange(%2 -1)
+} if labelRefCountChange(%2 -1)
// applies to f.e. vprintf.asm (--stack-auto)
replace {
inc a
%1:
%2:
-} labelRefCountChange(%2 -1)
+} if labelRefCountChange(%2 -1)
// applies to f.e. scott-bool1.c
replace {
} if notVolatile %1
-
-
// rules 247.x apply to f.e. bitfields.c
replace {
mov r%5,#%1
mov r%3,a
}
-// applies to: void test( char c ) { if( c ) func1(); else func2(); }
-replace {
- lcall %1
- ret
-} by {
- ; Peephole 253.a replaced lcall/ret with ljmp
- ljmp %1
-}
-
-// applies to: void test( char c ) { if( c ) func1(); else func2(); }
+// unsigned char i=8; do{ } while(--i != 0);
+// this applies if i is kept in a register
+// NOTE(review): cjne updates the carry flag but djnz leaves it unchanged
+// - confirm no subsequent code relies on C from this compare
replace {
- lcall %1
-%2:
- ret
+ dec %1
+ cjne %1,#0x00,%2
} by {
- ; Peephole 253.b replaced lcall/ret with ljmp
- ljmp %1
- ;
-} if labelRefCount %2 0
+ ; Peephole 253.a optimized decrement with compare
+ djnz %1,%2
+} if notVolatile(%1)
-// applies to f.e. scott-bool1.c
+// unsigned char i=8; do{ } while(--i != 0);
+// this applies if i is kept in data memory
+// must come before 256, see bug 1721024
+// djnz has no @Ri addressing mode, hence the @r0/@r1 exclusion below.
+// NOTE(review): the original left %1 in acc (0 when falling through);
+// djnz does not - confirm acc is dead after this pattern
replace {
- lcall %1
-%2:
- ret
+ dec %1
+ mov a,%1
+ jnz %2
} by {
- ; Peephole 253.c replaced lcall with ljmp
- ljmp %1
-%2:
- ret
-}
+ ; Peephole 253.b optimized decrement with compare
+ djnz %1,%2
+} if notVolatile(%1), operandsNotRelated(%1 '@r0' '@r1')
// applies to f.e. funptrs.c
} if labelRefCount(%2 0),operandsNotRelated('a' %4 %6 %8)
-// unsigned char i=8; do{ } while(--i != 0);
-// this currently only applies if i is kept in a register
-replace {
- dec %1
- cjne %1,#0x00,%2
-} by {
- ; Peephole 257 optimized decrement with compare
- djnz %1,%2
-} if notVolatile %1
-
-
// in_byte<<=1; if(in_bit) in_byte|=1;
// helps f.e. reading data on a 3-wire (SPI) bus
replace {
mov r%1,a
%4:
%3:
-} labelRefCountChange(%3 -1)
+} if labelRefCountChange(%3 -1)
// in_byte>>=1; if(in_bit) in_byte|=0x80;
replace {
mov r%1,a
%4:
%3:
-} labelRefCountChange(%3 -1)
+} if labelRefCountChange(%3 -1)
// out_bit=out_byte&0x80; out_byte<<=1;
// helps f.e. writing data on a 3-wire (SPI) bus
mov %1,a
}
+// the two complements cancel; carry ends up exactly as before
+replace {
+ cpl c
+ cpl c
+} by {
+ ; Peephole 262 removed redundant cpl c
+}
+
+// fold up to three inc's into the constant: one instruction instead of
+// up to four; mov and inc touch no flags, so flags are unaffected either
+// way. notVolatile is required because the intermediate values
+// %2 .. %2+2 are no longer written to %1.
+replace {
+ mov %1,#%2
+ inc %1
+ inc %1
+ inc %1
+} by {
+ ; Peephole 263.a optimized loading const
+ mov %1,#(%2 + 3)
+} if notVolatile(%1)
+
+replace {
+ mov %1,#%2
+ inc %1
+ inc %1
+} by {
+ ; Peephole 263.b optimized loading const
+ mov %1,#(%2 + 2)
+} if notVolatile(%1)
+
+replace {
+ mov %1,#%2
+ inc %1
+} by {
+ ; Peephole 263.c optimized loading const
+ mov %1,#(%2 + 1)
+} if notVolatile(%1)
+
+
+// the 0/1 value built in acc is only consumed by the jz, so branch on
+// the cjne directly; labelRefCount(%3 1) proves the matched cjne was the
+// only reference, and the retained %3 label is removed later by the
+// final label-cleanup rule
+replace {
+ clr a
+ cjne %1,%2,%3
+ inc a
+%3:
+ jz %4
+} by {
+ ; Peephole 264 jump optimization (acc not set)
+ cjne %1,%2,%4
+%3:
+} if labelRefCount(%3 1), labelRefCountChange(%3 -1)
+
+
+// %1 receives the same (complemented) value; only the carry flag ends up
+// inverted compared to the original, as noted below
+replace {
+ mov %1,c
+ cpl %1
+} by {
+ ; Peephole 265 optimized mov/cpl sequence (carry differs)
+ cpl c
+ mov %1,c
+} if notVolatile(%1)
+
+// %1 was just loaded from c, so testing the bit equals testing the
+// carry; jc/jnc (2 bytes) is shorter than jb/jnb (3 bytes) and the mov
+// is kept for later readers of %1
+replace {
+ mov %1,c
+ jb %1,%2
+} by {
+ ; Peephole 266.a optimized mov/jump sequence
+ mov %1,c
+ jc %2
+} if notVolatile(%1)
+
+replace {
+ mov %1,c
+ jnb %1,%2
+} by {
+ ; Peephole 266.b optimized mov/jump sequence
+ mov %1,c
+ jnc %2
+} if notVolatile(%1)
+
+// both branch arms merely transfer the carry into bit %2, so a single
+// mov %2,c does it without branching; the labels are kept (ref counts
+// adjusted) and cleaned up later if unused
+replace {
+ jnc %1
+ setb %2
+ sjmp %3
+%1:
+ clr %2
+%3:
+} by {
+ ; Peephole 267.a optimized mov bit sequence
+ mov %2,c
+%1:
+%3:
+} if labelRefCount(%1 1), labelRefCountChange(%1 -1), labelRefCountChange(%3 -1)
+
+replace {
+ jc %1
+ clr %2
+ sjmp %3
+%1:
+ setb %2
+%3:
+} by {
+ ; Peephole 267.b optimized mov bit sequence
+ mov %2,c
+%1:
+%3:
+} if labelRefCount(%1 1), labelRefCountChange(%1 -1), labelRefCountChange(%3 -1)
+
+// the second transfer is a no-op right after mov %1,c; notVolatile
+// guards against %1 being a volatile SFR bit
+replace {
+ mov %1,c
+ mov %1,c
+} by {
+ ; Peephole 268 removed redundant mov
+ mov %1,c
+} if notVolatile(%1)
+
+replace {
+ mov %1,c
+ mov c,%1
+} by {
+ ; Peephole 269 removed redundant mov
+ mov %1,c
+} if notVolatile(%1)
+
+// accessing struct/array on stack
+//replace {
+// add a,#%1
+// add a,#%2
+//} by {
+// ; Peephole 270 removed redundant add (carry might differ, bug 2736282)
+// add a,#%1+%2
+//}
+
+// funnel both ternary arms through acc: on the jz path acc is already
+// 0x00, so a single store to %2 suffices; acc afterwards differs from
+// the original sequence (noted below)
+replace {
+ jz %1
+ mov %2,%4
+ sjmp %3
+%1:
+ mov %2,#0x00
+%3:
+} by {
+ jz %1
+ ; Peephole 271 optimized ternary operation (acc different)
+ mov a,%4
+%1:
+ mov %2,a
+%3:
+} if operandsNotRelated('a' 'dptr' %2), labelRefCount(%1 1), labelRefCountChange(%3 -1)
+
+
+// NOTE(review): removing the pop alone would leave sp unbalanced -
+// presumably deadMove only matches where the paired push was removed as
+// well (restart re-runs after other rules); verify against the
+// deadMove implementation
+replace restart {
+ pop ar%1
+} by {
+ ; Peephole 300 pop ar%1 removed
+} if deadMove(%1)
+
+// a store into a dead register can be dropped; notVolatile(%2) keeps the
+// (now removed) read of %2 from mattering
+replace {
+ mov r%1,%2
+} by {
+ ; Peephole 301 mov r%1,%2 removed
+} if notVolatile(%2), deadMove(%1)
+
+
+// applies to: void test( char c ) { if( c ) func1(); else func2(); }
+// tail-call: the callee's ret returns straight to our caller, saving
+// the ret instruction and two bytes of stack
+replace {
+ lcall %1
+ ret
+} by {
+ ; Peephole 400.a replaced lcall/ret with ljmp
+ ljmp %1
+}
+
+// applies to: void test( char c ) { if( c ) func1(); else func2(); }
+// like 400.a, but with an intervening label that nothing references
+// (labelRefCount(%2 0)), so label and ret can both be dropped
+replace {
+ lcall %1
+%2:
+ ret
+} by {
+ ; Peephole 400.b replaced lcall/ret with ljmp
+ ljmp %1
+ ;
+} if labelRefCount(%2 0)
+
+// applies to f.e. scott-bool1.c
+// here %2 is still referenced elsewhere, so the label and its ret must
+// survive; only the lcall becomes a tail-call ljmp
+replace {
+ lcall %1
+%2:
+ ret
+} by {
+ ; Peephole 400.c replaced lcall with ljmp
+ ljmp %1
+%2:
+ ret
+}
+
+// for programs less than 2k
+// NOTE(review): acall/ajmp only reach within the current 2KB page -
+// presumably useAcallAjmp is only set when the whole program fits one
+// page; confirm
+replace {
+ lcall %1
+} by {
+ ; Peephole 400.d replaced lcall with acall
+ acall %1
+} if useAcallAjmp
+
+// for programs less than 2k
+replace {
+ ljmp %1
+} by {
+ ; Peephole 400.e replaced ljmp with ajmp
+ ajmp %1
+} if useAcallAjmp
+
+
// should be one of the last peepholes
+// (renumbered from 300: that number is now used by the pop-removal rule)
replace{
%1:
} by {
- ; Peephole 300 removed redundant label %1
+ ; Peephole 500 removed redundant label %1
} if labelRefCount(%1 0)