clr a
mov %1,a
mov %2,a
- ; Peephole 3.h changed mov %3,#0x00 to %3,a
+ ; Peephole 3.h changed mov %3,#0x00 to ...,a
mov %3,a
}
mov %1,a
mov %2,a
mov %3,a
- ; Peephole 3.i changed mov %4,#0x00 to %4,a
+ ; Peephole 3.i changed mov %4,#0x00 to ...,a
mov %4,a
}
mov %2,a
mov %3,a
mov %4,a
- ; Peephole 3.j changed mov %5,#0x00 to %5,a
+ ; Peephole 3.j changed mov %5,#0x00 to ...,a
mov %5,a
}
mov %3,a
mov %4,a
mov %5,a
- ; Peephole 3.k changed mov %6,#0x00 to %6,a
+ ; Peephole 3.k changed mov %6,#0x00 to ...,a
mov %6,a
}
mov a,%1
movx @dptr,a
} by {
- ; Peephole 100 removed redundant mov
mov %1,a
mov dptr,#%2
+ ; Peephole 100 removed redundant mov
movx @dptr,a
} if notVolatile %1
mov a,%1
movx @dptr,a
} by {
- ; Peephole 101 removed redundant mov
mov a,%1
movx @dptr,a
inc dptr
+ ; Peephole 101 removed redundant mov
movx @dptr,a
} if notVolatile %1
mov %1,a
mov a,%1
} by {
- ; Peephole 105 removed redundant mov
mov %1,a
+; Peephole 105 removed redundant mov
} if notVolatile %1
replace {
clr c
mov a,%1
} by {
- ; Peephole 106 removed redundant mov
mov %1,a
clr c
+ ; Peephole 106 removed redundant mov
} if notVolatile %1
replace {
} by {
; Peephole 107 removed redundant ljmp
%1:
-}
+} labelRefCountChange(%1 -1)
replace {
jc %1
ljmp %5
%1:
} by {
- ; Peephole 108 removed ljmp by inverse jump logic
+ ; Peephole 108.a removed ljmp by inverse jump logic
jnc %5
%1:
-} if labelInRange
+} if labelInRange(), labelRefCountChange(%1 -1)
replace {
jz %1
ljmp %5
%1:
} by {
- ; Peephole 109 removed ljmp by inverse jump logic
+ ; Peephole 108.b removed ljmp by inverse jump logic
jnz %5
%1:
-} if labelInRange
+} if labelInRange(), labelRefCountChange(%1 -1)
replace {
jnz %1
ljmp %5
%1:
} by {
- ; Peephole 110 removed ljmp by inverse jump logic
+ ; Peephole 108.c removed ljmp by inverse jump logic
jz %5
%1:
-} if labelInRange
+} if labelInRange(), labelRefCountChange(%1 -1)
replace {
jb %1,%2
ljmp %5
%2:
} by {
- ; Peephole 111 removed ljmp by inverse jump logic
+ ; Peephole 108.d removed ljmp by inverse jump logic
jnb %1,%5
%2:
-} if labelInRange
+} if labelInRange(), labelRefCountChange(%2 -1)
replace {
jnb %1,%2
ljmp %5
%2:
} by {
- ; Peephole 112.a removed ljmp by inverse jump logic
+ ; Peephole 108.e removed ljmp by inverse jump logic
jb %1,%5
%2:
-} if labelInRange
+} if labelInRange(), labelRefCountChange(%2 -1)
replace {
ljmp %5
rrc a
mov %4,c
} by {
- ; Peephole 113 optimized misc sequence
+ ; Peephole 113.a optimized misc sequence
clr %4
cjne %1,%2,%3
setb %4
rrc a
mov %4,c
} by {
- ; Peephole 114 optimized misc sequence
+ ; Peephole 113.b optimized misc sequence
clr %4
cjne %1,%2,%3
cjne %10,%11,%3
%3:
jnz %4
} by {
- ; Peephole 115.a jump optimization
+ ; Peephole 115.a jump optimization (acc not set)
cjne %1,%2,%3
sjmp %4
%3:
sjmp %3
%2:
} by {
- ; Peephole 115.b jump optimization
mov %1,a
+ ; Peephole 115.b jump optimization
jz %3
%2:
-}
+} labelRefCountChange(%2 -1)
replace {
clr a
%3:
jnz %4
} by {
- ; Peephole 116 jump optimization
+ ; Peephole 115.c jump optimization (acc not set)
cjne %1,%2,%3
cjne %9,%10,%3
sjmp %4
%3:
jnz %4
} by {
- ; Peephole 117 jump optimization
+ ; Peephole 115.d jump optimization (acc not set)
cjne %1,%2,%3
cjne %9,%10,%3
cjne %11,%12,%3
%3:
jnz %4
} by {
- ; Peephole 118 jump optimization
+ ; Peephole 115.e jump optimization (acc not set)
cjne %1,%2,%3
cjne %9,%10,%3
cjne %11,%12,%3
%3:
jnz %4
} by {
- ; Peephole 119 jump optimization
+ ; Peephole 115.f jump optimization (acc not set)
cjne %1,%2,%4
%3:
-} if labelRefCount %3 1
+} if labelRefCount(%3 1), labelRefCountChange(%3 -1)
replace {
mov a,#0x01
%3:
jnz %4
} by {
- ; Peephole 120 jump optimization
+ ; Peephole 115.g jump optimization (acc not set)
cjne %1,%2,%4
cjne %10,%11,%4
%3:
-} if labelRefCount %3 2
+} if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%4 1)
replace {
mov a,#0x01
%3:
jnz %4
} by {
- ; Peephole 121 jump optimization
+ ; Peephole 115.h jump optimization (acc not set)
cjne %1,%2,%4
cjne %10,%11,%4
cjne %12,%13,%4
%3:
-} if labelRefCount %3 3
+} if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%4 2)
replace {
mov a,#0x01
%3:
jnz %4
} by {
- ; Peephole 122 jump optimization
+ ; Peephole 115.i jump optimization (acc not set)
cjne %1,%2,%4
cjne %10,%11,%4
cjne %12,%13,%4
cjne %14,%15,%4
%3:
-} if labelRefCount %3 4
+} if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%4 3)
replace {
mov a,#0x01
%3:
jz %4
} by {
- ; Peephole 123 jump optimization
+ ; Peephole 115.j jump optimization (acc not set)
cjne %1,%2,%3
sjmp %4
%3:
%3:
jz %4
} by {
- ; Peephole 124 jump optimization
+ ; Peephole 115.k jump optimization (acc not set)
cjne %1,%2,%3
cjne %10,%11,%3
sjmp %4
%3:
jz %4
} by {
- ; Peephole 125 jump optimization
+ ; Peephole 115.l jump optimization (acc not set)
cjne %1,%2,%3
cjne %10,%11,%3
cjne %12,%13,%3
%3:
jz %4
} by {
- ; Peephole 126 jump optimization
+ ; Peephole 115.m jump optimization (acc not set)
cjne %1,%2,%3
cjne %10,%11,%3
cjne %12,%13,%3
mov r%4,%2
mov @r%4,ar%3
} by {
- ; Peephole 133 removed redundant moves
mov r%1,%2
+ ; Peephole 133 removed redundant moves
inc @r%1
mov ar%3,@r%1
} if notVolatile
mov r%4,%2
mov @r%4,ar%3
} by {
- ; Peephole 134 removed redundant moves
mov r%1,%2
+ ; Peephole 134 removed redundant moves
dec @r%1
mov ar%3,@r%1
} if notVolatile
mov a,r%2
orl a,r%1
} by {
- ; Peephole 135 removed redundant mov
mov r%1,a
+ ; Peephole 135 removed redundant mov
orl a,r%2
}
mov dph,%3
mov a,%1
} by {
- ; Peephole 136 removed redundant moves
mov %1,a
mov dpl,%2
mov dph,%3
+ ; Peephole 136 removed redundant move
} if notVolatile %1
// WTF? Doesn't look sensible to me...
anl ar%1,%2
mov a,r%1
} by {
- ; Peephole 139 removed redundant mov
+ ; Peephole 139.a removed redundant mov
anl a,%2
mov r%1,a
}
orl ar%1,%2
mov a,r%1
} by {
- ; Peephole 140 removed redundant mov
+ ; Peephole 139.b removed redundant mov
orl a,%2
mov r%1,a }
xrl ar%1,%2
mov a,r%1
} by {
- ; Peephole 141 removed redundant mov
+ ; Peephole 139.c removed redundant mov
xrl a,%2
mov r%1,a
}
rlc a
mov acc.0,c
} by {
- ; Peephole 143 converted rlc to rl
+ ; Peephole 143.a converted rlc to rl
rl a
}
rrc a
mov acc.7,c
} by {
- ; Peephole 144 converted rrc to rc
+	; Peephole 143.b converted rrc to rr
rr a
}
clr c
addc a,%1
} by {
- ; Peephole 145 changed to add without carry
+ ; Peephole 145.a changed to add without carry
add a,%1
}
mov a,%1
addc a,%2
} by {
- ; Peephole 146 changed to add without carry
+ ; Peephole 145.b changed to add without carry
mov a,%1
add a,%2
}
+// 147: Fix compiler output to comply with 8051 instruction set.
replace {
orl r%1,a
} by {
- ; Peephole 147 changed target address mode r%1 to ar%1
+ ; Peephole 147.a changed target address mode r%1 to ar%1
orl ar%1,a
}
replace {
anl r%1,a
} by {
- ; Peephole 148 changed target address mode r%1 to ar%1
+ ; Peephole 147.b changed target address mode r%1 to ar%1
anl ar%1,a
}
replace {
xrl r%1,a
} by {
- ; Peephole 149 changed target address mode r%1 to ar%1
+ ; Peephole 147.c changed target address mode r%1 to ar%1
xrl ar%1,a
}
%9:
ret
} by {
- ; Peephole 150 removed misc moves via dpl before return
+ ; Peephole 150.a removed misc moves via dpl before return
%9:
ret
}
%9:
ret
} by {
- ; Peephole 151 removed misc moves via dph, dpl before return
+ ; Peephole 150.b removed misc moves via dph, dpl before return
%9:
ret
}
%9:
ret
} by {
- ; Peephole 152 removed misc moves via dph, dpl before return
+ ; Peephole 150.c removed misc moves via dph, dpl before return
%9:
ret
}
%9:
ret
} by {
- ; Peephole 153 removed misc moves via dph, dpl, b before return
+ ; Peephole 150.d removed misc moves via dph, dpl, b before return
%9:
ret
}
%9:
ret
} by {
- ; Peephole 154 removed misc moves via dph, dpl, b before return
+ ; Peephole 150.e removed misc moves via dph, dpl, b before return
%9:
ret
}
%9:
ret
} by {
- ; Peephole 155 removed misc moves via dph, dpl, b before return
+ ; Peephole 150.f removed misc moves via dph, dpl, b before return
%9:
ret
}
%9:
ret
} by {
- ; Peephole 156 removed misc moves via dph, dpl, b, a before return
+ ; Peephole 150.g removed misc moves via dph, dpl, b, a before return
%9:
ret
}
%9:
ret
} by {
- ; Peephole 157 removed misc moves via dph, dpl, b, a before return
+ ; Peephole 150.h removed misc moves via dph, dpl, b, a before return
%9:
ret
}
%9:
ret
} by {
- ; Peephole 158 removed misc moves via dph, dpl, b, a before return
+ ; Peephole 150.i removed misc moves via dph, dpl, b, a before return
%9:
ret
}
+// peephole 213.a might revert this
replace {
mov %1,#%2
xrl %1,#0x80
sjmp %2
%1:
} by {
- ; Peephole 160 removed sjmp by inverse jump logic
+ ; Peephole 160.a removed sjmp by inverse jump logic
jc %2
%1:
-}
+} labelRefCountChange(%1 -1)
replace {
jc %1
sjmp %2
%1:
} by {
- ; Peephole 161 removed sjmp by inverse jump logic
+ ; Peephole 160.b removed sjmp by inverse jump logic
jnc %2
%1:
-}
+} labelRefCountChange(%1 -1)
replace {
jnz %1
sjmp %2
%1:
} by {
- ; Peephole 162 removed sjmp by inverse jump logic
+ ; Peephole 160.c removed sjmp by inverse jump logic
jz %2
%1:
-}
+} labelRefCountChange(%1 -1)
replace {
jz %1
sjmp %2
%1:
} by {
- ; Peephole 163 removed sjmp by inverse jump logic
+ ; Peephole 160.d removed sjmp by inverse jump logic
jnz %2
%1:
-}
+} labelRefCountChange(%1 -1)
replace {
jnb %3,%1
sjmp %2
%1:
} by {
- ; Peephole 164 removed sjmp by inverse jump logic
+ ; Peephole 160.e removed sjmp by inverse jump logic
jb %3,%2
%1:
-}
+} labelRefCountChange(%1 -1)
replace {
jb %3,%1
sjmp %2
%1:
} by {
- ; Peephole 165 removed sjmp by inverse jump logic
+ ; Peephole 160.f removed sjmp by inverse jump logic
jnb %3,%2
%1:
-}
+} labelRefCountChange(%1 -1)
replace {
mov %1,%2
mov %3,%1
mov %2,%1
} by {
- ; Peephole 166 removed redundant mov
mov %1,%2
mov %3,%1
+ ; Peephole 166 removed redundant mov
} if notVolatile %1 %2
replace {
; Peephole 168 jump optimization
jb %1,%3
%2:
-}
+} labelRefCountChange(%2 -1)
replace {
jb %1,%2
; Peephole 169 jump optimization
jnb %1,%3
%2:
-}
+} labelRefCountChange(%2 -1)
replace {
clr a
; Peephole 170 jump optimization
cjne %1,%2,%4
%3:
-} if labelRefCount %3 1
+} if labelRefCount(%3 1), labelRefCountChange(%3 -1)
replace {
clr a
cjne %1,%2,%4
cjne %9,%10,%4
%3:
-} if labelRefCount %3 2
+} if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%4 1)
replace {
clr a
cjne %9,%10,%4
cjne %11,%12,%4
%3:
-} if labelRefCount %3 3
+} if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%4 2)
replace {
clr a
cjne %11,%12,%4
cjne %13,%14,%4
%3:
-} if labelRefCount %3 4
+} if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%4 3)
replace {
mov r%1,%2
subb a,#0x01
mov %2,a
} by {
- ; Peephole 174 optimized decrement (acc not set to %2, flags undefined)
mov r%1,%2
+ ; Peephole 174.a optimized decrement (acc not set to %2, flags undefined)
dec %2
}
add a,#0x01
mov %2,a
} by {
- ; Peephole 175 optimized increment (acc not set to %2, flags undefined)
mov r%1,%2
+ ; Peephole 174.b optimized increment (acc not set to %2, flags undefined)
inc %2
}
inc %1
mov @r%2,%1
} by {
- ; Peephole 176 optimized increment, removed redundant mov
+ ; Peephole 174.c optimized increment, removed redundant mov
inc @r%2
mov %1,@r%2
} if notVolatile
mov %1,%2
mov %2,%1
} by {
- ; Peephole 177.a removed redundant mov
mov %1,%2
+ ; Peephole 177.a removed redundant mov
} if notVolatile %1 %2
// applies to f.e. scott-add.asm (--model-large)
mov r%1,a
mov a,ar%1
} by {
- ; Peephole 177.b removed redundant mov
mov r%1,a
+ ; Peephole 177.b removed redundant mov
}
// applies to f.e. bug-408972.c
mov %3,%4
mov %2,%1
} by {
- ; Peephole 177.d removed redundant move
mov %1,%2
mov %3,%4
-} if notVolatile(%1 %2),operandsNotRelated(%1 %3)
+ ; Peephole 177.d removed redundant move
+} if notVolatile(%1 %2),operandsNotRelated(%1 %2 %3)
// applies to f.e. bug-607243.c
// also check notVolatile %3, as it will return FALSE if it's @r%1
movx @dptr,a
mov dptr,%1
} by {
- ; Peephole 180.a removed redundant mov to dptr
mov dptr,%1
mov a,%2
movx @dptr,a
+ ; Peephole 180.a removed redundant mov to dptr
}
// volatile xdata char t; t=0x01; t=0x03; t=0x01;
movx @dptr,a
mov dptr,%1
} by {
- ; Peephole 180.b removed redundant mov to dptr
mov dptr,%1
mov a,%2
movx @dptr,a
mov a,%3
movx @dptr,a
+ ; Peephole 180.b removed redundant mov to dptr
}
// saving 1 byte, 0 cycles
anl %1,#%3
} by {
; Peephole 183 avoided anl during execution
- anl %1,#(%2 & %3)
+ anl %1,#(%2&%3)
}
replace {
mul ab
add a,#%2
mov dpl,a
- mov a,b
- addc a,#(%2 >> 8)
+ mov a,#(%2 >> 8)
+ addc a,b
mov dph,a
clr a
movc a,@a+dptr
mov a,#0x0f
anl a,%1
} by {
- ; Peephole 189 removed redundant mov and anl
anl a,#0x0f
mov %1,a
+ ; Peephole 189 removed redundant mov and anl
} if notVolatile %1
// rules 190 & 191 need to be in order
lcall __gptrput
mov a,%1
} by {
- ; Peephole 190 removed redundant mov
mov a,%1
lcall __gptrput
+ ; Peephole 190 removed redundant mov
} if notVolatile %1
replace {
mov b,%4
mov a,%1
} by {
- ; Peephole 191 removed redundant mov
mov %1,a
mov dpl,%2
mov dph,%3
mov b,%4
+ ; Peephole 191 removed redundant mov
} if notVolatile %1
replace {
mov r%1,a
mov @r%2,ar%1
} by {
- ; Peephole 192 used a instead of ar%1 as source
mov r%1,a
+ ; Peephole 192 used a instead of ar%1 as source
mov @r%2,a
}
mov a,%12
cjne %13,%14,%8
sjmp %7
-;%3:
-} if labelRefCount %3 4
+%3:
+} if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
replace {
cjne %1,%2,%3
mov a,%12
cjne %13,%14,%8
sjmp %7
-;%3:
-} if labelRefCount %3 4
+%3:
+} if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
replace {
cjne @%1,%2,%3
inc %1
cjne @%1,%14,%8
sjmp %7
-;%3:
-} if labelRefCount %3 4
+%3:
+} if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
replace {
cjne %1,%2,%3
cjne %10,%11,%8
cjne %13,%14,%8
sjmp %7
-;%3:
-} if labelRefCount %3 4
+%3:
+} if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
replace {
jnz %3
mov a,%9
cjne %10,%11,%8
sjmp %7
-;%3:
-} if labelRefCount %3 3
+%3:
+} if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
replace {
cjne %1,%2,%3
mov a,%9
cjne %10,%11,%8
sjmp %7
-;%3:
-} if labelRefCount %3 3
+%3:
+} if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
replace {
cjne @%1,%2,%3
inc %1
cjne @%1,%11,%8
sjmp %7
-;%3:
-} if labelRefCount %3 3
+%3:
+} if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
replace {
cjne %1,%2,%3
cjne %5,%6,%8
cjne %10,%11,%8
sjmp %7
-;%3:
-} if labelRefCount %3 3
+%3:
+} if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
replace {
jnz %3
mov a,%4
cjne %5,%6,%8
sjmp %7
-;%3:
-} if labelRefCount %3 2
+%3:
+} if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
replace {
cjne %1,%2,%3
mov a,%4
cjne %5,%6,%8
sjmp %7
-;%3:
-} if labelRefCount %3 2
+%3:
+} if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
replace {
cjne @%1,%2,%3
inc %1
cjne @%1,%6,%8
sjmp %7
-;%3:
-} if labelRefCount %3 2
+%3:
+} if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
replace {
cjne %1,%2,%3
%3:
sjmp %8
} by {
- ; Peephole 198 optimized misc jump sequence
+ ; Peephole 198.a optimized misc jump sequence
cjne %1,%2,%8
cjne %5,%6,%8
sjmp %7
-;%3:
-} if labelRefCount %3 2
+%3:
+} if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
replace {
cjne %1,%2,%3
%3:
sjmp %5
} by {
- ; Peephole 199 optimized misc jump sequence
+ ; Peephole 198.b optimized misc jump sequence
cjne %1,%2,%5
sjmp %4
-;%3:
-} if labelRefCount %3 1
+%3:
+} if labelRefCount(%3 1), labelRefCountChange(%3 -1)
replace {
sjmp %1
%1:
} by {
- ; Peephole 200 removed redundant sjmp
+ ; Peephole 200.a removed redundant sjmp
%1:
-}
+} labelRefCountChange(%1 -1)
replace {
sjmp %1
%2:
%1:
} by {
- ; Peephole 201 removed redundant sjmp
+ ; Peephole 200.b removed redundant sjmp
%2:
%1:
-}
+} labelRefCountChange(%1 -1)
replace {
push acc
djnz %1,%4
%2:
%3:
-} if labelRefCount %2 1
+} if labelRefCount(%2 1), labelRefCountChange(%2 -1), labelRefCountChange(%3 -1)
replace {
mov %1,%1
inc r%1
}
+// reverts peephole 159? asx8051 cannot handle, too complex?
replace {
mov %1,#(( %2 >> 8 ) ^ 0x80)
} by {
mov a,%2
add a,%1
} by {
- ; Peephole 214 reduced some extra moves
mov %1,a
+ ; Peephole 214 reduced some extra moves
add a,%2
} if operandsNotSame
dec r%1
mov @r%1,a
} by {
- ; Peephole 216 simplified clear (2bytes)
mov r%1,%2
clr a
+ ; Peephole 216.a simplified clear (2 bytes)
mov @r%1,a
inc r%1
mov @r%1,a
dec r%1
mov @r%1,a
} by {
- ; Peephole 217 simplified clear (3bytes)
mov r%1,%2
clr a
+ ; Peephole 216.b simplified clear (3 bytes)
mov @r%1,a
inc r%1
mov @r%1,a
dec r%1
mov @r%1,a
} by {
- ; Peephole 218 simplified clear (4bytes)
mov r%1,%2
clr a
+ ; Peephole 216.c simplified clear (4 bytes)
mov @r%1,a
inc r%1
mov @r%1,a
clr a
movx @dptr,a
} by {
- ; Peephole 219 removed redundant clear
+ ; Peephole 219.a removed redundant clear
clr a
movx @dptr,a
mov dptr,%1
clr a
movx @dptr,a
} by {
- ; Peephole 219.a removed redundant clear
clr a
movx @dptr,a
mov dptr,%1
movx @dptr,a
mov dptr,%2
+ ; Peephole 219.b removed redundant clear
movx @dptr,a
}
mov dpl,%1
mov dph,%2
} by {
- ; Peephole 223 removed redundant dph/dpl moves
mov %1,dpl
mov %2,dph
+ ; Peephole 223.a removed redundant dph/dpl moves
} if notVolatile %1 %2
replace {
mov dpl,%1
mov dph,(%1 + 1)
} by {
- ; Peephole 224 removed redundant dph/dpl moves
mov %1,dpl
mov (%1 + 1),dph
+ ; Peephole 223.b removed redundant dph/dpl moves
} if notVolatile %1
replace {
mov b,%4
mov a,%1
} by {
- ; Peephole 225 removed redundant move to acc
mov a,%1
movx @dptr,a
mov dpl,%2
mov dph,%3
mov b,%4
+ ; Peephole 225 removed redundant move to acc
} if notVolatile %1
replace {
clr a
- movx @dptr,a
- inc dptr
- movx @dptr,a
- inc dptr
+ movx @%1,a
+ inc %1
+ movx @%1,a
+ inc %1
clr a
} by {
- ; Peephole 226 removed unnecessary clr
clr a
- movx @dptr,a
- inc dptr
- movx @dptr,a
- inc dptr
+ movx @%1,a
+ inc %1
+ movx @%1,a
+ inc %1
+ ; Peephole 226 removed unnecessary clr
}
replace {
lcall __decdptr
movx @dptr,a
} by {
- ; Peephole 227 replaced inefficient 32 bit clear
mov dptr,#%1
clr a
+ ; Peephole 227.a replaced inefficient 32 bit clear
movx @dptr,a
inc dptr
movx @dptr,a
mov a,#%2
movx @dptr,a
} by {
- ; Peephole 228 replaced inefficient 32 constant
mov dptr,#%1
+	; Peephole 227.b replaced inefficient 32 bit constant
mov a,#%2
movx @dptr,a
inc dptr
lcall __decdptr
movx @dptr,a
} by {
- ; Peephole 229 replaced inefficient 16 bit clear
mov dptr,#%1
clr a
+ ; Peephole 227.c replaced inefficient 16 bit clear
movx @dptr,a
inc dptr
movx @dptr,a
mov a,#%2
movx @dptr,a
} by {
- ; Peephole 230 replaced inefficient 16 bit constant
mov dptr,#%1
+ ; Peephole 227.d replaced inefficient 16 bit constant
mov a,#%2
movx @dptr,a
inc dptr
mov dptr,#%1
}
-// this last peephole often removes the last mov from 227-230
+// this last peephole often removes the last mov from 227.a - 227.d
replace {
mov dptr,#%1
mov dptr,#%2
} by {
- ; Peephole 231 removed redundant mov to dptr
+ ; Peephole 227.e removed redundant mov to dptr
mov dptr,#%2
}
%2:
ret
} by {
- ; Peephole 234 loading dpl directly from a(ccumulator), r%1 not set
+ ; Peephole 234.a loading dpl directly from a(ccumulator), r%1 not set
mov dpl,a
%2:
ret
%3:
ret
} by {
- ; Peephole 235 loading dph directly from a(ccumulator), r%1 not set
+ ; Peephole 234.b loading dph directly from a(ccumulator), r%1 not set
mov dpl,r%2
mov dph,a
%3:
mov %3,%4
%1:
ret
-}
+} labelRefCountChange(%1 -1)
replace {
sjmp %1
mov dph,%6
%1:
ret
-}
+} labelRefCountChange(%1 -1)
// applies to f.e. device/lib/log10f.c
replace {
addc a,%1
}
-// peepholes 241.a to 241.c and 241.d to 241.f need to be in order
+// peepholes 241.a to 241.d and 241.e to 241.h need to be in order
replace {
- cjne r%1,#%2,%3
- cjne r%4,#%5,%3
- cjne r%6,#%7,%3
- cjne r%8,#%9,%3
+ cjne r%2,#%3,%0
+ cjne r%4,#%5,%0
+ cjne r%6,#%7,%0
+ cjne r%8,#%9,%0
mov a,#0x01
- sjmp %10
-%3:
+ sjmp %1
+%0:
clr a
-%10:
+%1:
} by {
; Peephole 241.a optimized compare
clr a
- cjne r%1,#%2,%3
- cjne r%4,#%5,%3
- cjne r%6,#%7,%3
- cjne r%8,#%9,%3
+ cjne r%2,#%3,%0
+ cjne r%4,#%5,%0
+ cjne r%6,#%7,%0
+ cjne r%8,#%9,%0
inc a
-%3:
-%10:
-}
+%0:
+%1:
+} labelRefCountChange(%1 -1)
-// applies to f.e. time.c
+// applies to generic pointer compare
replace {
- cjne r%1,#%2,%3
- cjne r%4,#%5,%3
+ cjne r%2,#%3,%0
+ cjne r%4,#%5,%0
+ cjne r%6,#%7,%0
mov a,#0x01
- sjmp %6
-%3:
+ sjmp %1
+%0:
clr a
-%6:
+%1:
} by {
; Peephole 241.b optimized compare
clr a
- cjne r%1,#%2,%3
- cjne r%4,#%5,%3
+ cjne r%2,#%3,%0
+ cjne r%4,#%5,%0
+ cjne r%6,#%7,%0
inc a
-%3:
-%6:
-}
+%0:
+%1:
+} labelRefCountChange(%1 -1)
-// applies to f.e. malloc.c
+// applies to f.e. time.c
replace {
- cjne r%1,#%2,%3
+ cjne r%2,#%3,%0
+ cjne r%4,#%5,%0
mov a,#0x01
- sjmp %4
-%3:
+ sjmp %1
+%0:
clr a
-%4:
+%1:
} by {
; Peephole 241.c optimized compare
clr a
- cjne r%1,#%2,%3
+ cjne r%2,#%3,%0
+ cjne r%4,#%5,%0
inc a
-%3:
-%4:
-}
+%0:
+%1:
+} labelRefCountChange(%1 -1)
+
+// applies to f.e. malloc.c
+replace {
+ cjne r%2,#%3,%0
+ mov a,#0x01
+ sjmp %1
+%0:
+ clr a
+%1:
+} by {
+ ; Peephole 241.d optimized compare
+ clr a
+ cjne r%2,#%3,%0
+ inc a
+%0:
+%1:
+} labelRefCountChange(%1 -1)
// applies to f.e. j = (k!=0x1000);
// with volatile idata long k;
replace {
- cjne @r%1,#%2,%3
- inc r%1
- cjne @r%1,#%4,%3
- inc r%1
- cjne @r%1,#%5,%3
- inc r%1
- cjne @r%1,#%6,%3
+ cjne @r%0,#%3,%1
+ inc r%0
+ cjne @r%0,#%4,%1
+ inc r%0
+ cjne @r%0,#%5,%1
+ inc r%0
+ cjne @r%0,#%6,%1
mov a,#0x01
- sjmp %7
-%3:
+ sjmp %2
+%1:
clr a
-%7:
+%2:
} by {
- ; Peephole 241.d optimized compare
+ ; Peephole 241.e optimized compare
clr a
- cjne @r%1,#%2,%3
- inc r%1
- cjne @r%1,#%4,%3
- inc r%1
- cjne @r%1,#%5,%3
- inc r%1
- cjne @r%1,#%6,%3
+ cjne @r%0,#%3,%1
+ inc r%0
+ cjne @r%0,#%4,%1
+ inc r%0
+ cjne @r%0,#%5,%1
+ inc r%0
+ cjne @r%0,#%6,%1
inc a
-%3:
-%7:
-}
+%1:
+%2:
+} labelRefCountChange(%2 -1)
+
+// applies to f.e. j = (p!=NULL);
+// with volatile idata char *p;
+replace {
+ cjne @r%0,#%3,%1
+ inc r%0
+ cjne @r%0,#%4,%1
+ inc r%0
+ cjne @r%0,#%5,%1
+ mov a,#0x01
+ sjmp %2
+%1:
+ clr a
+%2:
+} by {
+ ; Peephole 241.f optimized compare
+ clr a
+ cjne @r%0,#%3,%1
+ inc r%0
+ cjne @r%0,#%4,%1
+ inc r%0
+ cjne @r%0,#%5,%1
+ inc a
+%1:
+%2:
+} labelRefCountChange(%2 -1)
// applies to f.e. j = (k!=0x1000);
// with volatile idata int k;
replace {
- cjne @r%1,#%2,%3
- inc r%1
- cjne @r%1,#%4,%3
+ cjne @r%0,#%3,%1
+ inc r%0
+ cjne @r%0,#%4,%1
mov a,#0x01
- sjmp %7
-%3:
+ sjmp %2
+%1:
clr a
-%7:
+%2:
} by {
- ; Peephole 241.e optimized compare
+ ; Peephole 241.g optimized compare
clr a
- cjne @r%1,#%2,%3
- inc r%1
- cjne @r%1,#%4,%3
+ cjne @r%0,#%3,%1
+ inc r%0
+ cjne @r%0,#%4,%1
inc a
-%3:
-%7:
-}
+%1:
+%2:
+} labelRefCountChange(%2 -1)
// applies to f.e. vprintf.asm (--stack-auto)
replace {
- cjne @r%1,#%2,%3
+ cjne @r%0,#%3,%1
mov a,#0x01
- sjmp %7
-%3:
+ sjmp %2
+%1:
clr a
-%7:
+%2:
} by {
- ; Peephole 241.f optimized compare
+ ; Peephole 241.h optimized compare
clr a
- cjne @r%1,#%2,%3
+ cjne @r%0,#%3,%1
inc a
-%3:
-%7:
-}
+%1:
+%2:
+} labelRefCountChange(%2 -1)
// applies to f.e. scott-bool1.c
replace {
%1:
jz %4
} by {
- ; Peephole 242.a avoided branch jnz to jz
jnz %1
mov %2,%3
+ ; Peephole 242.a avoided branch jnz to jz
jz %4
%1:
} if labelRefCount %1 1
%1:
jz %4
} by {
- ; Peephole 242.b avoided branch jnz to jz
jnz %1
mov %2,%3
orl a,%5
+ ; Peephole 242.b avoided branch jnz to jz
jz %4
%1:
} if labelRefCount %1 1
%1:
jz %4
} by {
- ; Peephole 242.c avoided branch jnz to jz
jnz %1
mov %2,%3
orl a,%5
orl a,%6
orl a,%7
+ ; Peephole 242.c avoided branch jnz to jz
jz %4
%1:
} if labelRefCount %1 1
inc %4
%3:
sjmp %5
-} if labelInRange
+} if labelInRange(), labelRefCountChange(%3 -1), labelRefCountChange(%5 1)
// applies to f.e. simplefloat.c (saving 1 cycle)
replace {
mov r%1,a
mov dpl,r%1
} by {
- ; Peephole 244.c loading dpl from a instead of r%1
mov r%1,a
+ ; Peephole 244.c loading dpl from a instead of r%1
mov dpl,a
}
mov r%1,a
mov dph,r%1
} by {
- ; Peephole 244.d loading dph from a instead of r%1
mov r%1,a
+ ; Peephole 244.d loading dph from a instead of r%1
mov dph,a
}
clr a
rlc a
mov r%1,a
-} if labelRefCount %2 1
+} if labelRefCount(%2 1), labelRefCountChange(%2 -1)
// this one will not be triggered if 245 is present
// please remove 245 if 245.a 245.b are found to be safe
} by {
; Peephole 245.a optimized conditional jump (r%1 and acc not set!)
jc %3
-} if labelRefCount %2 1
+} if labelRefCount(%2 1), labelRefCountChange(%2 -1)
// this one will not be triggered if 245 is present
// please remove 245 if 245.a 245.b are found to be safe
} by {
; Peephole 245.b optimized conditional jump (r%1 and acc not set!)
jnc %3
-} if labelRefCount %2 1
+} if labelRefCount(%2 1), labelRefCountChange(%2 -1)
// rules 246.x apply to f.e. bitfields.c
anl a,#%3
movx @dptr,a
} by {
- ; Peephole 246.a combined clr/clr
mov dptr,#%1
movx a,@dptr
+ ; Peephole 246.a combined clr/clr
anl a,#%2&%3
movx @dptr,a
} if notVolatile %1
orl a,#%3
movx @dptr,a
} by {
- ; Peephole 246.b combined set/set
mov dptr,#%1
movx a,@dptr
+ ; Peephole 246.b combined set/set
orl a,#%2|%3
movx @dptr,a
} if notVolatile %1
anl a,#%3
movx @dptr,a
} by {
- ; Peephole 246.c combined set/clr
mov dptr,#%1
movx a,@dptr
orl a,#%2
+ ; Peephole 246.c combined set/clr
anl a,#%3
movx @dptr,a
} if notVolatile %1
orl a,#%3
movx @dptr,a
} by {
- ; Peephole 246.d combined clr/set
mov dptr,#%1
movx a,@dptr
anl a,#%2
+ ; Peephole 246.d combined clr/set
orl a,#%3
movx @dptr,a
} if notVolatile %1
anl a,#%4
movx @dptr,a
} by {
- ; Peephole 246.e combined set/clr/clr
mov dptr,#%1
movx a,@dptr
orl a,#%2
+ ; Peephole 246.e combined set/clr/clr
anl a,#%3&%4
movx @dptr,a
} if notVolatile %1
orl a,#%4
movx @dptr,a
} by {
- ; Peephole 246.f combined set/clr/set
mov dptr,#%1
movx a,@dptr
orl a,#%2
anl a,#%3
+ ; Peephole 246.f combined set/clr/set
orl a,#%4
movx @dptr,a
} if notVolatile %1
anl a,#%4
movx @dptr,a
} by {
- ; Peephole 246.g combined clr/set/clr
mov dptr,#%1
movx a,@dptr
anl a,#%2
orl a,#%3
+ ; Peephole 246.g combined clr/set/clr
anl a,#%4
movx @dptr,a
} if notVolatile %1
orl a,#%4
movx @dptr,a
} by {
- ; Peephole 246.h combined clr/set/set
mov dptr,#%1
movx a,@dptr
anl a,#%2
+ ; Peephole 246.h combined clr/set/set
orl a,#%3|%4
movx @dptr,a
} if notVolatile %1
anl a,#%3
mov @r%5,a
} by {
- ; Peephole 247.a combined clr/clr
mov r%5,#%1
mov a,@r%5
+ ; Peephole 247.a combined clr/clr
anl a,#%2&%3
mov @r%5,a
} if notVolatile %1
orl a,#%3
mov @r%5,a
} by {
- ; Peephole 247.b combined set/set
mov r%5,#%1
mov a,@r%5
+ ; Peephole 247.b combined set/set
orl a,#%2|%3
mov @r%5,a
} if notVolatile %1
anl a,#%3
mov @r%5,a
} by {
- ; Peephole 247.c combined set/clr
mov r%5,#%1
mov a,@r%5
orl a,#%2
+ ; Peephole 247.c combined set/clr
anl a,#%3
mov @r%5,a
} if notVolatile %1
orl a,#%3
mov @r%5,a
} by {
- ; Peephole 247.d combined clr/set
mov r%5,#%1
mov a,@r%5
anl a,#%2
+ ; Peephole 247.d combined clr/set
orl a,#%3
mov @r%5,a
} if notVolatile %1
anl a,#%4
mov @r%5,a
} by {
- ; Peephole 247.e combined set/clr/clr
mov r%5,#%1
mov a,@r%5
orl a,#%2
+ ; Peephole 247.e combined set/clr/clr
anl a,#%3&%4
mov @r%5,a
} if notVolatile %1
orl a,#%4
mov @r%5,a
} by {
- ; Peephole 247.f combined set/clr/set
mov r%5,#%1
mov a,@r%5
orl a,#%2
anl a,#%3
+ ; Peephole 247.f combined set/clr/set
orl a,#%4
mov @r%5,a
} if notVolatile %1
anl a,#%4
mov @r%5,a
} by {
- ; Peephole 247.g combined clr/set/clr
mov r%5,#%1
mov a,@r%5
anl a,#%2
orl a,#%3
+ ; Peephole 247.g combined clr/set/clr
anl a,#%4
mov @r%5,a
} if notVolatile %1
orl a,#%4
mov @r%5,a
} by {
- ; Peephole 247.h combined clr/set/set
mov r%5,#%1
mov a,@r%5
anl a,#%2
+ ; Peephole 247.h combined clr/set/set
orl a,#%3|%4
mov @r%5,a
} if notVolatile %1
orl a,r%2
movx @dptr,a
} by {
- ; Peephole 248.a optimized or to xdata
mov dptr,%1
movx a,@dptr
mov r%2,a
+ ; Peephole 248.a optimized or to xdata
orl a,%3
movx @dptr,a
}
anl a,r%2
movx @dptr,a
} by {
- ; Peephole 248.b optimized and to xdata
mov dptr,%1
movx a,@dptr
mov r%2,a
+ ; Peephole 248.b optimized and to xdata
anl a,%3
movx @dptr,a
}
xrl a,r%2
movx @dptr,a
} by {
- ; Peephole 248.c optimized xor to xdata
mov dptr,%1
movx a,@dptr
mov r%2,a
+ ; Peephole 248.c optimized xor to xdata
xrl a,%3
movx @dptr,a
}
orl a,%5
movx @dptr,a
} by {
- ; Peephole 248.d optimized or/and/or to volatile xdata
mov dptr,%1
movx a,@dptr
+ ; Peephole 248.d optimized or/and/or to volatile xdata
orl a,%3
movx @dptr,a
movx a,@dptr
anl a,%5
movx @dptr,a
} by {
- ; Peephole 248.e optimized and/or/and to volatile xdata
mov dptr,%1
movx a,@dptr
+ ; Peephole 248.e optimized and/or/and to volatile xdata
anl a,%3
movx @dptr,a
movx a,@dptr
anl a,%4
movx @dptr,a
} by {
- ; Peephole 248.f optimized or/and to volatile xdata
mov dptr,%1
movx a,@dptr
+ ; Peephole 248.f optimized or/and to volatile xdata
orl a,%3
movx @dptr,a
movx a,@dptr
orl a,%4
movx @dptr,a
} by {
- ; Peephole 248.g optimized and/or to volatile xdata
mov dptr,%1
movx a,@dptr
+ ; Peephole 248.g optimized and/or to volatile xdata
anl a,%3
movx @dptr,a
movx a,@dptr
xrl a,%4
movx @dptr,a
} by {
- ; Peephole 248.h optimized xor/xor to volatile xdata
mov dptr,%1
movx a,@dptr
+ ; Peephole 248.h optimized xor/xor to volatile xdata
xrl a,%3
movx @dptr,a
movx a,@dptr
orl a,%5
movx @dptr,a
} by {
- ; Peephole 248.i optimized or/and/or to xdata bitfield
mov dptr,%1
movx a,@dptr
orl a,%3
movx @dptr,a
+ ; Peephole 248.i optimized or/and/or to xdata bitfield
movx a,@dptr
anl a,%4
movx @dptr,a
anl a,%5
movx @dptr,a
} by {
- ; Peephole 248.j optimized and/or/and to xdata bitfield
mov dptr,%1
movx a,@dptr
anl a,%3
movx @dptr,a
+ ; Peephole 248.j optimized and/or/and to xdata bitfield
movx a,@dptr
orl a,%4
movx @dptr,a
anl a,%4
movx @dptr,a
} by {
- ; Peephole 248.k optimized or/and to xdata bitfield
mov dptr,%1
movx a,@dptr
orl a,%3
movx @dptr,a
+ ; Peephole 248.k optimized or/and to xdata bitfield
movx a,@dptr
anl a,%4
movx @dptr,a
orl a,%4
movx @dptr,a
} by {
- ; Peephole 248.l optimized and/or to xdata bitfield
mov dptr,%1
movx a,@dptr
anl a,%3
movx @dptr,a
+ ; Peephole 248.l optimized and/or to xdata bitfield
movx a,@dptr
orl a,%4
movx @dptr,a
xrl a,%4
movx @dptr,a
} by {
- ; Peephole 248.m optimized xor/xor to xdata bitfield
mov dptr,%1
movx a,@dptr
xrl a,%3
movx @dptr,a
+ ; Peephole 248.m optimized xor/xor to xdata bitfield
movx a,@dptr
xrl a,%4
movx @dptr,a
%1:
} by {
; Peephole 249.a jump optimization
-} if labelRefCount %1 1
+} if labelRefCount(%1 1), labelRefCountChange(%1 -1)
replace {
jz %1
%1:
} by {
; Peephole 249.b jump optimization
-} if labelRefCount %1 1
+} if labelRefCount(%1 1), labelRefCountChange(%1 -1)
// This allows non-interrupt and interrupt code to safely compete
jbc %1,%3
sjmp %2
%3:
-} if labelRefCount %3 0
+} if labelRefCount(%3 0), labelRefCountChange(%3 1)
replace {
jb %1,%2
} by {
; Peephole 251.a replaced ljmp to ret with ret
ret
-} if labelIsReturnOnly
+} if labelIsReturnOnly(), labelRefCountChange(%5 -1)
// not before peephole 250.b
replace {
} by {
; Peephole 251.b replaced sjmp to ret with ret
ret
-} if labelIsReturnOnly
+} if labelIsReturnOnly(), labelRefCountChange(%5 -1)
// applies to shifts.c and when accessing arrays with an unsigned integer index
// saving 1 byte, 2 cycles
} by {
; Peephole 253.b replaced lcall/ret with ljmp
ljmp %1
+ ;
} if labelRefCount %2 0
// applies to f.e. scott-bool1.c
mov a,%1
add a,acc
} by {
- ; Peephole 254 optimized left shift
mov a,%1
+ ; Peephole 254 optimized left shift
add a,%1
} if notVolatile %1
%2:
clr c
} by {
- ; Peephole 256.a removed redundant clr c
jc %1
%2:
+ ; Peephole 256.a removed redundant clr c
} if labelRefCount %2 0
// applies to f.e. logf.c
%2:
clr a
} by {
- ; Peephole 256.b removed redundant clr a
jnz %1
%2:
+ ; Peephole 256.b removed redundant clr a
} if labelRefCount %2 0
// applies to f.e. bug-905492.c
%2:
mov %3,#0x00
} by {
- ; Peephole 256.c loading %3 with zero from a
jnz %1
%2:
+ ; Peephole 256.c loading %3 with zero from a
mov %3,a
} if labelRefCount %2 0
mov %4,%5
mov %3,#0x00
} by {
- ; Peephole 256.d loading %3 with zero from a
jnz %1
%2:
mov %4,%5
+ ; Peephole 256.d loading %3 with zero from a
mov %3,a
} if labelRefCount(%2 0),operandsNotRelated('a' %4)
mov %6,%7
mov %3,#0x00
} by {
- ; Peephole 256.e loading %3 with zero from a
jnz %1
%2:
mov %4,%5
mov %6,%7
+ ; Peephole 256.e loading %3 with zero from a
mov %3,a
} if labelRefCount(%2 0),operandsNotRelated('a' %4 %6)
mov %8,%9
mov %3,#0x00
} by {
- ; Peephole 256.f loading %2 with zero from a
jnz %1
%2:
mov %4,%5
mov %6,%7
mov %8,%9
+	; Peephole 256.f loading %3 with zero from a
mov %3,a
} if labelRefCount(%2 0),operandsNotRelated('a' %4 %6 %8)
orl %1,#0x01
%3:
} by {
- ; Peephole 258.a optimized bitbanging
mov a,%1
+ ; Peephole 258.a optimized bitbanging
mov c,%2
addc a,%1
mov %1,a
%4:
%3:
-} if notVolatile %1
+} if notVolatile(%1), labelRefCountChange(%3 -1)
// in_byte<<=1; if(in_bit) in_byte|=1;
replace {
orl ar%1,#0x01
%3:
} by {
- ; Peephole 258.b optimized bitbanging
mov a,r%1
+ ; Peephole 258.b optimized bitbanging
mov c,%2
addc a,r%1
mov r%1,a
%4:
%3:
-}
+} labelRefCountChange(%3 -1)
// in_byte>>=1; if(in_bit) in_byte|=0x80;
replace {
orl %1,#0x80
%3:
} by {
- ; Peephole 258.c optimized bitbanging
mov a,%1
+ ; Peephole 258.c optimized bitbanging
mov c,%2
rrc a
mov %1,a
%4:
%3:
-} if notVolatile %1
+} if notVolatile(%1), labelRefCountChange(%3 -1)
// in_byte>>=1; if(in_bit) in_byte|=0x80;
replace {
orl ar%1,#0x80
%3:
} by {
- ; Peephole 258.d optimized bitbanging
mov a,r%1
+ ; Peephole 258.d optimized bitbanging
mov c,%2
rrc a
mov r%1,a
%4:
%3:
-}
+} labelRefCountChange(%3 -1)
// out_bit=out_byte&0x80; out_byte<<=1;
// helps f.e. writing data on a 3-wire (SPI) bus
add a,%1
mov %1,a
} by {
- ; Peephole 258.e optimized bitbanging
mov a,%1
+ ; Peephole 258.e optimized bitbanging
add a,%1
mov %2,c
mov %1,a
rrc a
mov %1,a
} by {
- ; Peephole 258.f optimized bitbanging
mov a,%1
+ ; Peephole 258.f optimized bitbanging
clr c
rrc a
mov %2,c
mov %1,a
} if notVolatile %1
-// Peepholes 259.x are not compatible with peepholex 250.x
-// Peepholes 250.x add jumps to a previously unused label. As the
+// Peepholes 259.x rely on the correct labelRefCount. Otherwise they are
+// not compatible with peepholes 250.x
+// Peepholes 250.x add jumps to a previously unused label. If the
// labelRefCount is not increased, peepholes 259.x are (mistakenly) applied.
// (Mail on sdcc-devel 2004-10-25)
-// Note: Peepholes 193..199, 251 remove jumps to previously used labels without
-// decreasing labelRefCount (less dangerous - this f.e. leads to 253.c being
-// applied instead of 253.b))
//
// applies to f.e. vprintf.c
-//replace {
-// sjmp %1
-//%2:
-// ret
-//} by {
-// sjmp %1
-// ; Peephole 259.a removed redundant label %2 and ret
-// ;
-//} if labelRefCount %2 0
+replace {
+ sjmp %1
+%2:
+ ret
+} by {
+ sjmp %1
+ ; Peephole 259.a removed redundant label %2 and ret
+ ;
+} if labelRefCount %2 0
// applies to f.e. gets.c
-//replace {
-// ljmp %1
-//%2:
-// ret
-//} by {
-// ljmp %1
-// ; Peephole 259.b removed redundant label %2 and ret
-// ;
-//} if labelRefCount %2 0
+replace {
+ ljmp %1
+%2:
+ ret
+} by {
+ ljmp %1
+ ; Peephole 259.b removed redundant label %2 and ret
+ ;
+} if labelRefCount %2 0
// optimizing jumptables
// Please note: to enable peephole 260.x you currently have to set
mov acc.0,c
mov %1,a
} by {
- ; Peephole 261.a optimized left rol
mov a,%1
rlc a
+ ; Peephole 261.a optimized left rol
xch a,%2
rlc a
xch a,%2
mov acc.7,c
mov %1,a
} by {
- ; Peephole 261.b optimized right rol
mov a,%1
rrc a
+ ; Peephole 261.b optimized right rol
xch a,%2
rrc a
xch a,%2
mov acc.7,c
mov %1,a
}
+
+replace {
+ cpl c
+ cpl c
+} by {
+ ; Peephole 262 removed redundant cpl c
+}
+
+// should be one of the last peepholes
+replace{
+%1:
+} by {
+ ; Peephole 300 removed redundant label %1
+} if labelRefCount(%1 0)