5 // ; Peephole 1 removed pop %1 push %1 (not push pop)
13 // ; Peephole 2 removed pop %1 push %1 (not push pop)
18 // added by Jean Louis VERN for
24 ; Peephole 2.a removed redundant xch xch
32 ; Peephole 3.a changed mov to clr
42 ; Peephole 3.b changed mov to clr
48 // saving 1 byte, losing 1 cycle but maybe allowing peephole 3.b to start
53 ; Peephole 3.c changed mov to clr
69 ; Peephole 3.d removed redundant clr
83 ; Peephole 3.e removed redundant clr
99 ; Peephole 3.f removed redundant clr
117 ; Peephole 3.g removed redundant clr
129 ; Peephole 3.h changed mov %3,#0x00 to ...,a
144 ; Peephole 3.i changed mov %4,#0x00 to ...,a
161 ; Peephole 3.j changed mov %5,#0x00 to ...,a
180 ; Peephole 3.k changed mov %6,#0x00 to ...,a
192 ; Peephole 100 removed redundant mov
206 ; Peephole 101 removed redundant mov
221 ; Peephole 102 removed redundant mov to %1
230 } if notVolatile(%1), labelRefCount(%3 1)
243 ; Peephole 103 removed redundant mov to %1
252 } if labelRefCount(%3 1)
254 // Does not seem to be triggered anymore
261 // ; Peephole 104 optimized increment (acc not set to r%1, flags undefined)
271 ; Peephole 105 removed redundant mov
281 ; Peephole 106 removed redundant mov
288 ; Peephole 107 removed redundant ljmp
290 } if labelRefCountChange(%1 -1)
297 ; Peephole 108.a removed ljmp by inverse jump logic
300 } if labelInRange(), labelRefCountChange(%1 -1)
307 ; Peephole 108.b removed ljmp by inverse jump logic
310 } if labelInRange(), labelRefCountChange(%1 -1)
317 ; Peephole 108.c removed ljmp by inverse jump logic
320 } if labelInRange(), labelRefCountChange(%1 -1)
327 ; Peephole 108.d removed ljmp by inverse jump logic
330 } if labelInRange(), labelRefCountChange(%2 -1)
337 ; Peephole 108.e removed ljmp by inverse jump logic
340 } if labelInRange(), labelRefCountChange(%2 -1)
346 ; Peephole 112.b changed ljmp to sjmp
359 ; Peephole 113.a optimized misc sequence
364 } if labelRefCount %3 1
375 ; Peephole 113.b optimized misc sequence
381 } if labelRefCount %3 2
390 ; Peephole 115.a jump optimization (acc not set)
394 } if labelRefCount %3 1
403 ; Peephole 115.b jump optimization
406 } if labelRefCountChange(%2 -1)
416 ; Peephole 115.c jump optimization (acc not set)
421 } if labelRefCount %3 2
432 ; Peephole 115.d jump optimization (acc not set)
438 } if labelRefCount %3 3
450 ; Peephole 115.e jump optimization (acc not set)
457 } if labelRefCount %3 4
466 ; Peephole 115.f jump optimization (acc not set)
469 } if labelRefCount(%3 1), labelRefCountChange(%3 -1)
479 ; Peephole 115.g jump optimization (acc not set)
483 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%4 1)
494 ; Peephole 115.h jump optimization (acc not set)
499 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%4 2)
511 ; Peephole 115.i jump optimization (acc not set)
517 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%4 3)
526 ; Peephole 115.j jump optimization (acc not set)
530 } if labelRefCount %3 1
540 ; Peephole 115.k jump optimization (acc not set)
545 } if labelRefCount %3 2
556 ; Peephole 115.l jump optimization (acc not set)
562 } if labelRefCount %3 3
574 ; Peephole 115.m jump optimization (acc not set)
581 } if labelRefCount %3 4
594 ; Peephole 127 removed misc sequence
596 } if labelRefCount %3 0
603 ; Peephole 128 jump optimization
607 // applies to: bug-524691.c --model-large: while (uRight - uLeft > 1)
613 ; Peephole 129.a jump optimization
617 // applies to: _fsdiv.c --xstack: if (mant1 < mant2)
624 ; Peephole 129.b optimized condition
629 // applies to: time.c --xstack: while((days += (LEAP_YEAR(year) ? 366 : 365)) <= epoch)
637 ; Peephole 129.c optimized condition
641 } if notVolatile %1 %2
643 // applies to: _memmove.c --xstack: if (((int)src < (int)dst) && ((((int)src)+acount) > (int)dst))
652 ; Peephole 129.d optimized condition
657 } if notVolatile %1 %2 %3
662 ; Peephole 130 changed target address mode r%1 to ar%1
672 ; Peephole 131 optimized decrement (not caring for c)
677 // ideally the optimizations of rules 132.x should be done in genCmpXX
684 ; Peephole 132.a optimized genCmpGt by inverse logic (acc differs)
688 } if operandsLiteral(%1)
696 ; Peephole 132.b optimized genCmpGt by inverse logic (acc differs)
700 } if operandsLiteral(%1)
708 ; Peephole 132.c optimized genCmpGt by inverse logic (acc differs)
712 } if operandsLiteral(%1)
720 ; Peephole 132.d optimized genCmpGt by inverse logic
724 } if operandsNotRelated('0x00' %2), operandsLiteral(%2)
732 ; Peephole 132.e optimized genCmpLt by inverse logic (carry differs)
736 } if operandsNotRelated('0x00' %2), operandsLiteral(%2)
744 ; Peephole 132.f optimized genCmpLt by inverse logic (carry differs)
748 } if operandsNotRelated('0x00' %2), operandsLiteral(%2)
759 ; Peephole 133 removed redundant moves
772 ; Peephole 134 removed redundant moves
783 ; Peephole 135 removed redundant mov
796 ; Peephole 136 removed redundant move
799 // WTF? Doesn't look sensible to me...
809 // ; Peephole 137 optimized misc jump sequence
813 //} if labelRefCount %4 1
824 // ; Peephole 138 optimized misc jump sequence
829 //} if labelRefCount %4 1
836 ; Peephole 139.a removed redundant mov
846 ; Peephole 139.b removed redundant mov
855 ; Peephole 139.c removed redundant mov
860 // applies to genlshOne
867 ; Peephole 140 removed redundant mov
878 ; Peephole 142 removed redundant moves
887 ; Peephole 143.a converted rlc to rl
895 ; Peephole 143.b converted rrc to rr
903 ; Peephole 145.a changed to add without carry
912 ; Peephole 145.b changed to add without carry
917 // 147: Fix compiler output to comply with 8051 instruction set.
921 ; Peephole 147.a changed target address mode r%1 to ar%1
928 ; Peephole 147.b changed target address mode r%1 to ar%1
935 ; Peephole 147.c changed target address mode r%1 to ar%1
945 ; Peephole 150.a removed misc moves via dpl before return
958 ; Peephole 150.b removed misc moves via dph, dpl before return
970 ; Peephole 150.c removed misc moves via dph, dpl before return
985 ; Peephole 150.d removed misc moves via dph, dpl, b before return
998 ; Peephole 150.e removed misc moves via dph, dpl, b before return
1012 ; Peephole 150.f removed misc moves via dph, dpl, b before return
1029 ; Peephole 150.g removed misc moves via dph, dpl, b, a before return
1044 ; Peephole 150.h removed misc moves via dph, dpl, b, a before return
1058 ; Peephole 150.i removed misc moves via dph, dpl, b, a before return
1063 // peephole 213.a might revert this
1068 ; Peephole 159 avoided xrl during execution
1077 ; Peephole 160.a removed sjmp by inverse jump logic
1080 } if labelRefCountChange(%1 -1)
1087 ; Peephole 160.b removed sjmp by inverse jump logic
1090 } if labelRefCountChange(%1 -1)
1097 ; Peephole 160.c removed sjmp by inverse jump logic
1100 } if labelRefCountChange(%1 -1)
1107 ; Peephole 160.d removed sjmp by inverse jump logic
1110 } if labelRefCountChange(%1 -1)
1117 ; Peephole 160.e removed sjmp by inverse jump logic
1120 } if labelRefCountChange(%1 -1)
1127 ; Peephole 160.f removed sjmp by inverse jump logic
1130 } if labelRefCountChange(%1 -1)
1139 ; Peephole 166 removed redundant mov
1140 } if notVolatile %1 %2
1147 ; Peephole 167 removed redundant bit moves (c not set to %1)
1156 ; Peephole 168 jump optimization
1159 } if labelRefCountChange(%2 -1)
1166 ; Peephole 169 jump optimization
1169 } if labelRefCountChange(%2 -1)
1178 ; Peephole 170 jump optimization
1181 } if labelRefCount(%3 1), labelRefCountChange(%3 -1)
1191 ; Peephole 171 jump optimization
1195 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%4 1)
1206 ; Peephole 172 jump optimization
1211 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%4 2)
1223 ; Peephole 173 jump optimization
1229 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%4 3)
1239 ; Peephole 174.a optimized decrement (acc not set to %2, flags undefined)
1250 ; Peephole 174.b optimized increment (acc not set to %2, flags undefined)
1259 ; Peephole 174.c optimized increment, removed redundant mov
1264 // this one will screw assignments to volatile/sfr's
1270 ; Peephole 177.a removed redundant mov
1271 } if notVolatile %1 %2
1273 // applies to f.e. scott-add.asm (--model-large)
1279 ; Peephole 177.b removed redundant mov
1282 // applies to f.e. bug-408972.c
1287 ; Peephole 177.c removed redundant move
1289 } if notVolatile %1 %2
1291 // applies to f.e. bug-408972.c
1292 // not before peephole 177.c
1300 ; Peephole 177.d removed redundant move
1301 } if notVolatile(%1 %2),operandsNotRelated(%1 %2 %3)
1303 // applies to f.e. bug-607243.c
1304 // also check notVolatile %3, as it will return FALSE if it's @r%1
1309 ; peephole 177.e removed redundant move
1311 } if notVolatile %2 %3
1317 ; peephole 177.f removed redundant move
1325 ; peephole 177.g optimized mov sequence
1334 ; peephole 177.h optimized mov sequence
1344 ; Peephole 178 removed redundant mov
1349 // rules 179-182 provided by : Frieder <fe@lake.iup.uni-heidelberg.de>
1350 // saving 2 byte, 1 cycle
1355 ; Peephole 179 changed mov to clr
1361 // volatile xdata char t; t=0x01; t=0x03;
1371 ; Peephole 180.a removed redundant mov to dptr
1374 // volatile xdata char t; t=0x01; t=0x03; t=0x01;
1388 ; Peephole 180.b removed redundant mov to dptr
1391 // saving 1 byte, 0 cycles
1395 ; Peephole 181 changed mov to clr
1399 // saving 3 bytes, 2 cycles
1400 // provided by Bernhard Held <bernhard.held@de.westinghouse.com>
1405 ; Peephole 182.a used 16 bit load of DPTR
1409 // saving 3 byte, 2 cycles, return(NULL) profits here
1414 ; Peephole 182.b used 16 bit load of dptr
1418 // saving 3 byte, 2 cycles. Probably obsoleted by 182.b
1423 ; Peephole 182.c used 16 bit load of dptr
1424 mov dptr,#(((%2)<<8) + %1)
1427 // applies to return 0.0; in f.e. sincosf.c
1433 ; Peephole 182.d used 16 bit load of dptr
1434 mov dptr,#(%1&0x00ff)
1442 ; Peephole 183 avoided anl during execution
1451 ; Peephole 184 removed redundant mov
1457 // acc being incremented might cause problems
1461 ; Peephole 185 changed order of increment (acc incremented also!)
1466 // char indexed access to: long code table[] = {4,3,2,1};
1488 ; Peephole 186.a optimized movc sequence
1506 // char indexed access to: void* code table[] = {4,3,2,1};
1524 ; Peephole 186.b optimized movc sequence
1538 // char indexed access to: int code table[] = {4,3,2,1};
1552 ; Peephole 186.c optimized movc sequence
1561 // char indexed access to: char code table[] = {4,3,2,1};
1571 ; Peephole 186.d optimized movc sequence
1576 // char indexed access to: int code table[] = {4,3,2,1};
1591 ; Peephole 186.e optimized movc sequence (b, dptr differ)
1609 ; Peephole 187 used a instead of ar%1 for anl
1621 ; Peephole 188 removed redundant mov
1635 ; Peephole 189 removed redundant mov and anl
1638 // rules 190 & 191 need to be in order
1646 ; Peephole 190 removed redundant mov
1660 ; Peephole 191 removed redundant mov
1668 ; Peephole 192 used a instead of ar%1 as source
1684 ; Peephole 193.a optimized misc jump sequence
1694 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1708 ; Peephole 193.b optimized misc jump sequence
1718 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1732 ; Peephole 193.c optimized misc jump sequence
1742 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1753 ; Peephole 194 optimized misc jump sequence
1760 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1772 ; Peephole 195.a optimized misc jump sequence
1780 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1792 ; Peephole 195.b optimized misc jump sequence
1800 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1812 ; Peephole 195.c optimized misc jump sequence
1820 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1830 ; Peephole 196 optimized misc jump sequence
1836 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1846 ; Peephole 197.a optimized misc jump sequence
1852 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1862 ; Peephole 197.b optimized misc jump sequence
1868 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1878 ; Peephole 197.c optimized misc jump sequence
1884 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1893 ; Peephole 198.a optimized misc jump sequence
1898 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1906 ; Peephole 198.b optimized misc jump sequence
1910 } if labelRefCount(%3 1), labelRefCountChange(%3 -1)
1916 ; Peephole 200.a removed redundant sjmp
1918 } if labelRefCountChange(%1 -1)
1925 ; Peephole 200.b removed redundant sjmp
1928 } if labelRefCountChange(%1 -1)
1935 ; Peephole 202 removed redundant push pop
1944 ; Peephole 203 removed mov r%1,_spx
1953 ; Peephole 204 removed redundant mov
1965 ; Peephole 205 optimized misc jump sequence
1969 } if labelRefCount(%2 1), labelRefCountChange(%2 -1), labelRefCountChange(%3 -1)
1974 ; Peephole 206 removed redundant mov %1,%1
1977 // Does not seem to be triggered anymore
1983 // ; Peephole 207 removed zero add (acc not set to %1, flags undefined)
1992 ; Peephole 208 removed redundant push pop
1996 // Does not seem to be triggered anymore
2003 // ; Peephole 209 optimized increment (acc not set to %1, flags undefined)
2009 mov dptr,#((((%1 >> 8)) <<8) + %1)
2011 ; Peephole 210 simplified expression
2019 ; Peephole 211 removed redundant push %1 pop %1
2022 // Does not seem to be triggered anymore
2028 // ; Peephole 212 reduced add sequence to inc
2033 // reverts peephole 159? asx8051 cannot handle, too complex?
2035 mov %1,#(( %2 >> 8 ) ^ 0x80)
2037 ; Peephole 213.a inserted fix
2043 mov %1,#(( %2 + %3 >> 8 ) ^ 0x80)
2045 ; Peephole 213.b inserted fix
2046 mov %1,#((%2 + %3) >> 8)
2057 ; Peephole 214 reduced some extra moves
2059 } if operandsNotSame
2066 ; Peephole 215 removed some moves
2069 } if operandsNotSame
2081 ; Peephole 216.a simplified clear (2 bytes)
2100 ; Peephole 216.b simplified clear (3 bytes)
2124 ; Peephole 216.c simplified clear (4 bytes)
2141 ; Peephole 219.a removed redundant clear
2162 ; Peephole 219.b removed redundant clear
2170 ; Peephole 220.a removed bogus DPS set
2178 ; Peephole 220.b removed bogus DPS set
2183 mov %1 + %2,(%2 + %1)
2185 ; Peephole 221.a remove redundant move
2189 mov (%1 + %2 + %3),((%2 + %1) + %3)
2191 ; Peephole 221.b remove redundant move
2198 ; Peephole 222 removed dec/inc pair
2209 ; Peephole 223.a removed redundant dph/dpl moves
2210 } if notVolatile %1 %2
2220 ; Peephole 223.b removed redundant dph/dpl moves
2236 ; Peephole 225 removed redundant move to acc
2248 ; Peephole 226.a removed unnecessary clr
2264 ; Peephole 226.b removed unnecessary clr
2283 ; Peephole 227.a replaced inefficient 32 bit clear
2310 ; Peephole 227.b replaced inefficient 32 bit constant
2333 ; Peephole 227.c replaced inefficient 16 bit clear
2350 ; Peephole 227.d replaced inefficient 16 bit constant
2359 // this last peephole often removes the last mov from 227.a - 227.d
2364 ; Peephole 227.e removed redundant mov to dptr
2371 ; Peephole 232 using movc to read xdata (--xram-movc)
2379 ; Peephole 233 using _gptrgetc instead of _gptrget (--xram-movc)
2389 ; Peephole 234.a loading dpl directly from a(ccumulator), r%1 not set
2402 ; Peephole 234.b loading dph directly from a(ccumulator), r%1 not set
2409 // 14 rules by Fiorenzo D. Ramaglia <fd.ramaglia@tin.it>
2414 ; Peephole 236.a used r%1 instead of ar%1
2421 ; Peephole 236.b used r%1 instead of ar%1
2428 ; Peephole 236.c used r%1 instead of ar%1
2435 ; Peephole 236.d used r%1 instead of ar%1
2442 ; Peephole 236.e used r%1 instead of ar%1
2449 ; Peephole 236.f used r%1 instead of ar%1
2456 ; Peephole 236.g used r%1 instead of ar%1
2463 ; Peephole 236.h used r%1 instead of ar%1
2470 ; Peephole 236.i used r%1 instead of ar%1
2477 ; Peephole 236.j used r%1 instead of ar%1
2484 ; Peephole 236.k used r%1 instead of ar%1
2491 ; Peephole 236.l used r%1 instead of ar%1
2498 ; Peephole 236.m used r%1 instead of ar%1
2505 ; Peephole 236.n used r%1 instead of ar%1
2516 ; Peephole 237.a removed sjmp to ret
2522 } if labelRefCountChange(%1 -1)
2533 ; Peephole 237.b removed sjmp to ret
2541 } if labelRefCountChange(%1 -1)
2543 // applies to f.e. device/lib/log10f.c
2569 ; Peephole 238.a removed 4 redundant moves
2570 } if operandsNotSame8 %1 %2 %3 %4 %5 %6 %7 %8
2572 // applies to device/lib/log10f.c
2587 ; Peephole 238.b removed 3 redundant moves
2588 } if operandsNotSame7 %1 %2 %3 %4 %5 %6 %7
2590 // applies to f.e. device/lib/time.c
2606 ; Peephole 238.c removed 2 redundant moves
2607 } if operandsNotSame4 %1 %2 %3 %4
2609 // applies to f.e. support/regression/tests/bug-524209.c
2622 ; Peephole 238.d removed 3 redundant moves
2623 } if operandsNotSame6 %1 %2 %3 %4 %5 %6
2625 // applies to f.e. ser_ir.asm
2629 ; Peephole 239 used a instead of acc
2637 ; Peephole 240 use clr instead of addc a,#0
2642 // peepholes 241.a to 241.d and 241.e to 241.h need to be in order
2654 ; Peephole 241.a optimized compare
2663 } if labelRefCountChange(%1 -1)
2665 // applies to generic pointer compare
2676 ; Peephole 241.b optimized compare
2684 } if labelRefCountChange(%1 -1)
2686 // applies to f.e. time.c
2696 ; Peephole 241.c optimized compare
2703 } if labelRefCountChange(%1 -1)
2705 // applies to f.e. malloc.c
2714 ; Peephole 241.d optimized compare
2720 } if labelRefCountChange(%1 -1)
2722 // applies to f.e. j = (k!=0x1000);
2723 // with volatile idata long k;
2738 ; Peephole 241.e optimized compare
2750 } if labelRefCountChange(%2 -1)
2752 // applies to f.e. j = (p!=NULL);
2753 // with volatile idata char *p;
2766 ; Peephole 241.f optimized compare
2776 } if labelRefCountChange(%2 -1)
2778 // applies to f.e. j = (k!=0x1000);
2779 // with volatile idata int k;
2790 ; Peephole 241.g optimized compare
2798 } if labelRefCountChange(%2 -1)
2800 // applies to f.e. vprintf.asm (--stack-auto)
2809 ; Peephole 241.h optimized compare
2815 } if labelRefCountChange(%2 -1)
2817 // applies to f.e. scott-bool1.c
2826 ; Peephole 242.a avoided branch jnz to jz
2829 } if labelRefCount %1 1
2831 // applies to f.e. scott-bool1.c
2842 ; Peephole 242.b avoided branch jnz to jz
2845 } if labelRefCount %1 1
2847 // applies to f.e. logic.c
2862 ; Peephole 242.c avoided branch jnz to jz
2865 } if labelRefCount %1 1
2867 // applies to f.e. vprintf.c
2868 // this is a rare case, usually the "tail increment" is noticed earlier
2875 ; Peephole 243 avoided branch to sjmp
2880 } if labelInRange(), labelRefCountChange(%3 -1), labelRefCountChange(%5 1)
2882 // applies to f.e. simplefloat.c (saving 1 cycle)
2887 ; Peephole 244.a moving first to a instead of r%1
2892 // applies to f.e. _itoa.c (saving 1 cycle)
2897 ; Peephole 244.b moving first to a instead of r%1
2903 // applies to f.e. bug-460010.c (saving 1 cycle)
2909 ; Peephole 244.c loading dpl from a instead of r%1
2918 ; Peephole 244.d loading dph from a instead of r%1
2922 // this one is safe but disables 245.a 245.b
2923 // please remove 245 if 245.a 245.b are found to be safe
2924 // applies to f.e. scott-compare.c
2935 ; Peephole 245 optimized complement (r%1 and acc set needed?)
2940 } if labelRefCount(%2 1), labelRefCountChange(%2 -1)
2942 // this one will not be triggered if 245 is present
2943 // please remove 245 if 245.a 245.b are found to be safe
2944 // applies to f.e. vprintf.c
2956 ; Peephole 245.a optimized conditional jump (r%1 and acc not set!)
2958 } if labelRefCount(%2 1), labelRefCountChange(%2 -1)
2960 // this one will not be triggered if 245 is present
2961 // please remove 245 if 245.a 245.b are found to be safe
2962 // applies to f.e. scott-compare.c
2974 ; Peephole 245.b optimized conditional jump (r%1 and acc not set!)
2976 } if labelRefCount(%2 1), labelRefCountChange(%2 -1)
2979 // rules 246.x apply to f.e. bitfields.c
2992 ; Peephole 246.a combined clr/clr
3009 ; Peephole 246.b combined set/set
3027 ; Peephole 246.c combined set/clr
3045 ; Peephole 246.d combined clr/set
3064 ; Peephole 246.e combined set/clr/clr
3084 ; Peephole 246.f combined set/clr/set
3104 ; Peephole 246.g combined clr/set/clr
3123 ; Peephole 246.h combined clr/set/set
3129 // rules 247.x apply to f.e. bitfields.c
3142 ; Peephole 247.a combined clr/clr
3159 ; Peephole 247.b combined set/set
3177 ; Peephole 247.c combined set/clr
3195 ; Peephole 247.d combined clr/set
3214 ; Peephole 247.e combined set/clr/clr
3234 ; Peephole 247.f combined set/clr/set
3254 ; Peephole 247.g combined clr/set/clr
3273 ; Peephole 247.h combined clr/set/set
3279 // Peepholes 248.x have to be compatible with the keyword volatile.
3280 // They optimize typical accesses to memory mapped I/O devices:
3281 // volatile xdata char t; t|=0x01;
3294 ; Peephole 248.a optimized or to xdata
3299 // volatile xdata char t; t&=0x01;
3312 ; Peephole 248.b optimized and to xdata
3317 // volatile xdata char t; t^=0x01;
3330 ; Peephole 248.c optimized xor to xdata
3335 // volatile xdata char t; t|=0x01; t&=~0x01; t|=0x01;
3357 ; Peephole 248.d optimized or/and/or to volatile xdata
3369 // volatile xdata char t; t&=~0x01; t|=0x01; t&=~0x01;
3391 ; Peephole 248.e optimized and/or/and to volatile xdata
3403 // volatile xdata char t; t|=0x01; t&=~0x01;
3419 ; Peephole 248.f optimized or/and to volatile xdata
3428 // volatile xdata char t; t&=~0x01; t|=0x01;
3444 ; Peephole 248.g optimized and/or to volatile xdata
3453 // volatile xdata char t; t^=0x01; t^=0x01;
3469 ; Peephole 248.h optimized xor/xor to volatile xdata
3478 // Peepholes 248.i to 248.m are like 248.d to 248.h except they apply to bitfields:
3479 // xdata struct { unsigned b0:1; unsigned b1:1; unsigned b2:1; } xport;
3480 // xport.b0=1; xport.b0=0; xport.b0=1;
3501 ; Peephole 248.i optimized or/and/or to xdata bitfield
3530 ; Peephole 248.j optimized and/or/and to xdata bitfield
3554 ; Peephole 248.k optimized or/and to xdata bitfield
3575 ; Peephole 248.l optimized and/or to xdata bitfield
3596 ; Peephole 248.m optimized xor/xor to xdata bitfield
3607 ; Peephole 249.a jump optimization
3608 } if labelRefCount(%1 1), labelRefCountChange(%1 -1)
3614 ; Peephole 249.b jump optimization
3615 } if labelRefCount(%1 1), labelRefCountChange(%1 -1)
3618 // This allows non-interrupt and interrupt code to safely compete
3619 // for a resource without the non-interrupt code having to disable
3621 // volatile bit resource_is_free;
3622 // if( resource_is_free ) {
3623 // resource_is_free=0; do_something; resource_is_free=1;
3630 ; Peephole 250.a using atomic test and clear
3634 } if labelRefCount(%3 0), labelRefCountChange(%3 1)
3642 ; Peephole 250.b using atomic test and clear
3646 } if labelRefCount %2 1
3649 // not before peephole 250.b
3653 ; Peephole 251.a replaced ljmp to ret with ret
3655 } if labelIsReturnOnly(), labelRefCountChange(%5 -1)
3657 // not before peephole 250.b
3661 ; Peephole 251.b replaced sjmp to ret with ret
3663 } if labelIsReturnOnly(), labelRefCountChange(%5 -1)
3665 // applies to shifts.c and when accessing arrays with an unsigned integer index
3666 // saving 1 byte, 2 cycles
3676 ; Peephole 252 optimized left shift
3685 // applies to: void test( char c ) { if( c ) func1(); else func2(); }
3690 ; Peephole 253.a replaced lcall/ret with ljmp
3694 // applies to: void test( char c ) { if( c ) func1(); else func2(); }
3700 ; Peephole 253.b replaced lcall/ret with ljmp
3703 } if labelRefCount %2 0
3705 // applies to f.e. scott-bool1.c
3711 ; Peephole 253.c replaced lcall with ljmp
3718 // applies to f.e. funptrs.c
3719 // saves one byte if %1 is a register or @register
3725 ; Peephole 254 optimized left shift
3729 // applies to f.e. switch.c
3742 ; Peephole 255 optimized jump table index calculation
3744 cjne a,#(%1+0x01),.+1
3753 // applies to f.e. jump tables and scott-bool1.c.
3754 // similar peepholes can be constructed for other instructions
3755 // after which a flag or a register is known (like: djnz, cjne, jnc)
3763 ; Peephole 256.a removed redundant clr c
3764 } if labelRefCount %2 0
3766 // applies to f.e. logf.c
3774 ; Peephole 256.b removed redundant clr a
3775 } if labelRefCount %2 0
3777 // applies to f.e. bug-905492.c
3785 ; Peephole 256.c loading %3 with zero from a
3787 } if labelRefCount %2 0
3789 // applies to f.e. malloc.c
3799 ; Peephole 256.d loading %3 with zero from a
3801 } if labelRefCount(%2 0),operandsNotRelated('a' %4)
3814 ; Peephole 256.e loading %3 with zero from a
3816 } if labelRefCount(%2 0),operandsNotRelated('a' %4 %6)
3831 ; Peephole 256.f loading %2 with zero from a
3833 } if labelRefCount(%2 0),operandsNotRelated('a' %4 %6 %8)
3836 // unsigned char i=8; do{ } while(--i != 0);
3837 // this applies if i is kept in a register
3842 ; Peephole 257.a optimized decrement with compare
3844 } if notVolatile(%1)
3846 // unsigned char i=8; do{ } while(--i != 0);
3847 // this applies if i is kept in data memory
3853 ; Peephole 257.b optimized decrement with compare
3855 } if notVolatile(%1), operandsNotRelated(%1 '@r0' '@r1')
3858 // in_byte<<=1; if(in_bit) in_byte|=1;
3859 // helps f.e. reading data on a 3-wire (SPI) bus
3870 ; Peephole 258.a optimized bitbanging
3876 } if notVolatile(%1), labelRefCountChange(%3 -1)
3878 // in_byte<<=1; if(in_bit) in_byte|=1;
3889 ; Peephole 258.b optimized bitbanging
3895 } if labelRefCountChange(%3 -1)
3897 // in_byte>>=1; if(in_bit) in_byte|=0x80;
3909 ; Peephole 258.c optimized bitbanging
3915 } if notVolatile(%1), labelRefCountChange(%3 -1)
3917 // in_byte>>=1; if(in_bit) in_byte|=0x80;
3929 ; Peephole 258.d optimized bitbanging
3935 } if labelRefCountChange(%3 -1)
3937 // out_bit=out_byte&0x80; out_byte<<=1;
3938 // helps f.e. writing data on a 3-wire (SPI) bus
3948 ; Peephole 258.e optimized bitbanging
3954 // out_bit=out_byte&0x01; out_byte>>=1;
3965 ; Peephole 258.f optimized bitbanging
3972 // Peepholes 259.x rely on the correct labelRefCount. Otherwise they are
3973 // not compatible with peepholes 250.x
3974 // Peepholes 250.x add jumps to a previously unused label. If the
3975 // labelRefCount is not increased, peepholes 259.x are (mistakenly) applied.
3976 // (Mail on sdcc-devel 2004-10-25)
3978 // applies to f.e. vprintf.c
3985 ; Peephole 259.a removed redundant label %2 and ret
3987 } if labelRefCount %2 0
3989 // applies to f.e. gets.c
3996 ; Peephole 259.b removed redundant label %2 and ret
3998 } if labelRefCount %2 0
4000 // optimizing jumptables
4001 // Please note: to enable peephole 260.x you currently have to set
4002 // the environment variable SDCC_SJMP_JUMPTABLE
4014 ; Peephole 260.a used sjmp in jumptable
4025 // optimizing jumptables
4038 ; Peephole 260.b used sjmp in jumptable
4050 // optimizing jumptables
4064 ; Peephole 260.c used sjmp in jumptable
4077 // optimizing jumptables
4092 ; Peephole 260.d used sjmp in jumptable
4106 // optimizing jumptables
4122 ; Peephole 260.e used sjmp in jumptable
4137 // optimizing jumptables
4155 ; Peephole 260.f used sjmp in jumptable
4172 // optimizing jumptables
4191 ; Peephole 260.g used sjmp in jumptable
4209 // optimizing jumptables
4229 ; Peephole 260.h used sjmp in jumptable
4248 // optimizing jumptables
4269 ; Peephole 260.i used sjmp in jumptable
4289 // optimizing jumptables
4311 ; Peephole 260.j used sjmp in jumptable
4332 // optimizing jumptables
4355 ; Peephole 260.k used sjmp in jumptable
4377 // optimizing jumptables
4401 ; Peephole 260.l used sjmp in jumptable
4424 // optimizing jumptables
4449 ; Peephole 260.m used sjmp in jumptable
4473 // applies to: a = (a << 1) | (a >> 15);
4487 ; Peephole 261.a optimized left rol
4495 // applies to: a = (a << 15) | (a >> 1);
4509 ; Peephole 261.b optimized right rol
4521 ; Peephole 262 removed redundant cpl c
4530 ; Peephole 263.a optimized loading const
4539 ; Peephole 263.b optimized loading const
4547 ; Peephole 263.c optimized loading const
4559 ; Peephole 264 jump optimization (acc not set)
4562 } if labelRefCount(%3 1), labelRefCountChange(%3 -1)
4564 // should be one of the last peepholes
4568 ; Peephole 300 removed redundant label %1
4569 } if labelRefCount(%1 0)