1 // added by Jean Louis VERN for
7 ; Peephole 2.a removed redundant xch xch
15 ; Peephole 3.a changed mov to clr
25 ; Peephole 3.b changed mov to clr
31 // saving 1 byte, losing 1 cycle but maybe allowing peephole 3.b to start
36 ; Peephole 3.c changed mov to clr
52 ; Peephole 3.d removed redundant clr
66 ; Peephole 3.e removed redundant clr
82 ; Peephole 3.f removed redundant clr
100 ; Peephole 3.g removed redundant clr
112 ; Peephole 3.h changed mov %3,#0x00 to ...,a
127 ; Peephole 3.i changed mov %4,#0x00 to ...,a
144 ; Peephole 3.j changed mov %5,#0x00 to ...,a
163 ; Peephole 3.k changed mov %6,#0x00 to ...,a
175 ; Peephole 100 removed redundant mov
179 // applies to f.e. lib/src/time.c (--model-large)
196 ; Peephole 101.a removed redundant moves
204 // applies to f.e. support/regression/tests/literalop.c (--model-large)
218 ; Peephole 101.b removed redundant moves
224 // applies to f.e. support/regression/tests/onebyte.c (--model-large)
235 ; Peephole 101.c removed redundant mov
250 ; Peephole 102 removed redundant mov to %1
259 } if notVolatile(%1), labelRefCount(%3 1)
272 ; Peephole 103 removed redundant mov to %1
281 } if labelRefCount(%3 1)
283 // Does not seem to be triggered anymore
290 // ; Peephole 104 optimized increment (acc not set to r%1, flags undefined)
300 ; Peephole 105 removed redundant mov
310 ; Peephole 106 removed redundant mov
317 ; Peephole 107 removed redundant ljmp
319 } if labelRefCountChange(%1 -1)
326 ; Peephole 108.a removed ljmp by inverse jump logic
329 } if labelInRange(), labelRefCountChange(%1 -1)
336 ; Peephole 108.b removed ljmp by inverse jump logic
339 } if labelInRange(), labelRefCountChange(%1 -1)
346 ; Peephole 108.c removed ljmp by inverse jump logic
349 } if labelInRange(), labelRefCountChange(%1 -1)
356 ; Peephole 108.d removed ljmp by inverse jump logic
359 } if labelInRange(), labelRefCountChange(%2 -1)
366 ; Peephole 108.e removed ljmp by inverse jump logic
369 } if labelInRange(), labelRefCountChange(%2 -1)
375 ; Peephole 112.b changed ljmp to sjmp
388 ; Peephole 113.a optimized misc sequence
393 } if labelRefCount %3 1
404 ; Peephole 113.b optimized misc sequence
410 } if labelRefCount %3 2
419 ; Peephole 115.a jump optimization (acc not set)
423 } if labelRefCount %3 1
432 ; Peephole 115.b jump optimization
435 } if labelRefCountChange(%2 -1)
445 ; Peephole 115.c jump optimization (acc not set)
450 } if labelRefCount %3 2
461 ; Peephole 115.d jump optimization (acc not set)
467 } if labelRefCount %3 3
479 ; Peephole 115.e jump optimization (acc not set)
486 } if labelRefCount %3 4
495 ; Peephole 115.f jump optimization (acc not set)
498 } if labelRefCount(%3 1), labelRefCountChange(%3 -1)
508 ; Peephole 115.g jump optimization (acc not set)
512 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%4 1)
523 ; Peephole 115.h jump optimization (acc not set)
528 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%4 2)
540 ; Peephole 115.i jump optimization (acc not set)
546 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%4 3)
555 ; Peephole 115.j jump optimization (acc not set)
559 } if labelRefCount %3 1
569 ; Peephole 115.k jump optimization (acc not set)
574 } if labelRefCount %3 2
585 ; Peephole 115.l jump optimization (acc not set)
591 } if labelRefCount %3 3
603 ; Peephole 115.m jump optimization (acc not set)
610 } if labelRefCount %3 4
623 ; Peephole 127 removed misc sequence
625 } if labelRefCount %3 0
632 ; Peephole 128 jump optimization
636 // applies to: bug-524691.c --model-large: while (uRight - uLeft > 1)
642 ; Peephole 129.a jump optimization
646 // applies to: _fsdiv.c --xstack: if (mant1 < mant2)
653 ; Peephole 129.b optimized condition
658 // applies to: time.c --xstack: while((days += (LEAP_YEAR(year) ? 366 : 365)) <= epoch)
666 ; Peephole 129.c optimized condition
670 } if notVolatile %1 %2
672 // applies to: _memmove.c --xstack: if (((int)src < (int)dst) && ((((int)src)+acount) > (int)dst))
681 ; Peephole 129.d optimized condition
686 } if notVolatile %1 %2 %3
691 ; Peephole 130 changed target address mode r%1 to ar%1
701 ; Peephole 131 optimized decrement (not caring for c)
706 // ideally the optimizations of rules 132.x should be done in genCmpXX
713 ; Peephole 132.a optimized genCmpGt by inverse logic (acc differs)
717 } if operandsLiteral(%1)
725 ; Peephole 132.b optimized genCmpGt by inverse logic (acc differs)
729 } if operandsLiteral(%1)
737 ; Peephole 132.c optimized genCmpGt by inverse logic (acc differs)
741 } if operandsLiteral(%1)
743 // disabled. See bug1734654.c
750 // ; Peephole 132.d optimized genCmpGt by inverse logic
754 //} if operandsNotRelated('0x00' %2), operandsLiteral(%2)
762 ; Peephole 132.e optimized genCmpLt by inverse logic (carry differs)
766 } if operandsNotRelated('0x00' %2), operandsLiteral(%2)
774 ; Peephole 132.f optimized genCmpLt by inverse logic (carry differs)
778 } if operandsNotRelated('0x00' %2), operandsLiteral(%2)
789 ; Peephole 133 removed redundant moves
802 ; Peephole 134 removed redundant moves
813 ; Peephole 135 removed redundant mov
826 ; Peephole 136 removed redundant mov
829 // WTF? Doesn't look sensible to me...
839 // ; Peephole 137 optimized misc jump sequence
843 //} if labelRefCount %4 1
854 // ; Peephole 138 optimized misc jump sequence
859 //} if labelRefCount %4 1
866 ; Peephole 139.a removed redundant mov
876 ; Peephole 139.b removed redundant mov
886 ; Peephole 139.c removed redundant mov
891 // applies to genlshOne
898 ; Peephole 140 removed redundant mov
909 ; Peephole 142 removed redundant mov
918 ; Peephole 143.a converted rlc to rl
926 ; Peephole 143.b converted rrc to rr
934 ; Peephole 145.a changed to add without carry
943 ; Peephole 145.b changed to add without carry
948 // 147: Fix compiler output to comply with 8051 instruction set.
952 ; Peephole 147.a changed target address mode r%1 to ar%1
959 ; Peephole 147.b changed target address mode r%1 to ar%1
966 ; Peephole 147.c changed target address mode r%1 to ar%1
976 ; Peephole 150.a removed misc moves via dpl before return
989 ; Peephole 150.b removed misc moves via dph, dpl before return
1001 ; Peephole 150.c removed misc moves via dph, dpl before return
1016 ; Peephole 150.d removed misc moves via dph, dpl, b before return
1029 ; Peephole 150.e removed misc moves via dph, dpl, b before return
1043 ; Peephole 150.f removed misc moves via dph, dpl, b before return
1060 ; Peephole 150.g removed misc moves via dph, dpl, b, a before return
1075 ; Peephole 150.h removed misc moves via dph, dpl, b, a before return
1089 ; Peephole 150.i removed misc moves via dph, dpl, b, a before return
1094 // peephole 213.a might revert this
1099 ; Peephole 159 avoided xrl during execution
1108 ; Peephole 160.a removed sjmp by inverse jump logic
1111 } if labelRefCountChange(%1 -1)
1118 ; Peephole 160.b removed sjmp by inverse jump logic
1121 } if labelRefCountChange(%1 -1)
1128 ; Peephole 160.c removed sjmp by inverse jump logic
1131 } if labelRefCountChange(%1 -1)
1138 ; Peephole 160.d removed sjmp by inverse jump logic
1141 } if labelRefCountChange(%1 -1)
1148 ; Peephole 160.e removed sjmp by inverse jump logic
1151 } if labelRefCountChange(%1 -1)
1158 ; Peephole 160.f removed sjmp by inverse jump logic
1161 } if labelRefCountChange(%1 -1)
1170 ; Peephole 166 removed redundant mov
1171 } if notVolatile %1 %2
1178 ; Peephole 167 removed redundant bit moves (c not set to %1)
1187 ; Peephole 168 jump optimization
1190 } if labelRefCountChange(%2 -1)
1197 ; Peephole 169 jump optimization
1200 } if labelRefCountChange(%2 -1)
1209 ; Peephole 170 jump optimization
1212 } if labelRefCount(%3 1), labelRefCountChange(%3 -1)
1222 ; Peephole 171 jump optimization
1226 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%4 1)
1237 ; Peephole 172 jump optimization
1242 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%4 2)
1254 ; Peephole 173 jump optimization
1260 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%4 3)
1270 ; Peephole 174.a optimized decrement (acc not set to %2, flags undefined)
1281 ; Peephole 174.b optimized increment (acc not set to %2, flags undefined)
1290 ; Peephole 174.c optimized increment, removed redundant mov
1295 // this one will screw assignes to volatile/sfr's
1301 ; Peephole 177.a removed redundant mov
1302 } if notVolatile %1 %2
1304 // applies to f.e. scott-add.asm (--model-large)
1310 ; Peephole 177.b removed redundant mov
1313 // applies to f.e. bug-408972.c
1318 ; Peephole 177.c removed redundant mov
1320 } if notVolatile %1 %2
1322 // applies to f.e. bug-408972.c
1323 // not before peephole 177.c
1331 ; Peephole 177.d removed redundant mov
1332 } if notVolatile(%1 %2),operandsNotRelated(%1 %2 %3)
1334 // applies to f.e. bug-607243.c
1335 // also check notVolatile %3, as it will return FALSE if it's @r%1
1340 ; peephole 177.e removed redundant mov
1342 } if notVolatile %2 %3
1348 ; peephole 177.f removed redundant mov
1356 ; peephole 177.g optimized mov sequence
1365 ; peephole 177.h optimized mov sequence
1370 // applies to f.e. testfwk.c
1376 ; peephole 177.i optimized mov sequence
1385 ; Peephole 178 removed redundant mov
1390 // rules 179-182 provided by : Frieder <fe@lake.iup.uni-heidelberg.de>
1391 // saving 2 byte, 1 cycle
1396 ; Peephole 179 changed mov to clr
1402 // volatile xdata char t; t=0x01; t=0x03;
1412 ; Peephole 180.a removed redundant mov to dptr
1415 // volatile xdata char t; t=0x01; t=0x03; t=0x01;
1429 ; Peephole 180.b removed redundant mov to dptr
1432 // saving 1 byte, 0 cycles
1436 ; Peephole 181 changed mov to clr
1440 // saving 3 bytes, 2 cycles
1441 // provided by Bernhard Held <bernhard.held@de.westinghouse.com>
1446 ; Peephole 182.a used 16 bit load of DPTR
1450 // saving 3 byte, 2 cycles, return(NULL) profits here
1455 ; Peephole 182.b used 16 bit load of dptr
1459 // saving 3 byte, 2 cycles. Probably obsoleted by 182.b
1464 ; Peephole 182.c used 16 bit load of dptr
1465 mov dptr,#(((%2)<<8) + %1)
1468 // applies to return 0.0; in f.e. sincosf.c
1474 ; Peephole 182.d used 16 bit load of dptr
1475 mov dptr,#(%1&0x00ff)
1483 ; Peephole 183 avoided anl during execution
1492 ; Peephole 184 removed redundant mov
1498 // acc being incremented might cause problems with register tracking
1502 // ; Peephole 185 changed order of increment (acc incremented also!)
1505 //} if notVolatile %1
1507 // char indexed access to: long code table[] = {4,3,2,1};
1529 ; Peephole 186.a optimized movc sequence
1547 // char indexed access to: void* code table[] = {4,3,2,1};
1565 ; Peephole 186.b optimized movc sequence
1579 // char indexed access to: int code table[] = {4,3,2,1};
1593 ; Peephole 186.c optimized movc sequence
1602 // char indexed access to: char code table[] = {4,3,2,1};
1612 ; Peephole 186.d optimized movc sequence
1617 // char indexed access to: int code table[] = {4,3,2,1};
1632 ; Peephole 186.e optimized movc sequence (b, dptr differ)
1650 ; Peephole 187 used a instead of ar%1 for anl
1662 ; Peephole 188 removed redundant mov
1676 ; Peephole 189 removed redundant mov and anl
1679 // rules 190 & 191 need to be in order
1687 ; Peephole 190 removed redundant mov
1701 ; Peephole 191 removed redundant mov
1704 // applies to f.e. regression/ports/mcs51/support.c
1710 ; Peephole 192.a used a instead of ar%1 as source
1714 // applies to f.e. printf_large.c
1719 ; Peephole 192.b used a instead of ar%1 as destination
1736 ; Peephole 193.a optimized misc jump sequence
1746 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1760 ; Peephole 193.b optimized misc jump sequence
1770 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1784 ; Peephole 193.c optimized misc jump sequence
1794 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1805 ; Peephole 194 optimized misc jump sequence
1812 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1824 ; Peephole 195.a optimized misc jump sequence
1832 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1844 ; Peephole 195.b optimized misc jump sequence
1852 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1864 ; Peephole 195.c optimized misc jump sequence
1872 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1882 ; Peephole 196 optimized misc jump sequence
1888 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1898 ; Peephole 197.a optimized misc jump sequence
1904 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1914 ; Peephole 197.b optimized misc jump sequence
1920 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1930 ; Peephole 197.c optimized misc jump sequence
1936 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1945 ; Peephole 198.a optimized misc jump sequence
1950 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1958 ; Peephole 198.b optimized misc jump sequence
1962 } if labelRefCount(%3 1), labelRefCountChange(%3 -1)
1968 ; Peephole 200.a removed redundant sjmp
1970 } if labelRefCountChange(%1 -1)
1977 ; Peephole 200.b removed redundant sjmp
1980 } if labelRefCountChange(%1 -1)
1987 ; Peephole 202 removed redundant push pop
1996 ; Peephole 203 removed mov r%1,_spx
2005 ; Peephole 204 removed redundant mov
2017 ; Peephole 205 optimized misc jump sequence
2021 } if labelRefCount(%2 1), labelRefCountChange(%2 -1), labelRefCountChange(%3 -1)
2026 ; Peephole 206 removed redundant mov %1,%1
2029 // Does not seem to be triggered anymore
2035 // ; Peephole 207 removed zero add (acc not set to %1, flags undefined)
2044 ; Peephole 208 removed redundant push pop
2048 // Does not seem to be triggered anymore
2055 // ; Peephole 209 optimized increment (acc not set to %1, flags undefined)
2061 mov dptr,#((((%1 >> 8)) <<8) + %1)
2063 ; Peephole 210 simplified expression
2071 ; Peephole 211 removed redundant push %1 pop %1
2074 // Does not seem to be triggered anymore
2080 // ; Peephole 212 reduced add sequence to inc
2085 // reverts peephole 159? asx8051 cannot handle, too complex?
2087 mov %1,#(( %2 >> 8 ) ^ 0x80)
2089 ; Peephole 213.a inserted fix
2095 mov %1,#(( %2 + %3 >> 8 ) ^ 0x80)
2097 ; Peephole 213.b inserted fix
2098 mov %1,#((%2 + %3) >> 8)
2109 ; Peephole 214.a removed redundant mov
2111 } if operandsNotSame
2118 ; Peephole 214.b removed redundant mov
2121 } if operandsNotSame
2133 ; Peephole 216.a simplified clear (2 bytes)
2152 ; Peephole 216.b simplified clear (3 bytes)
2176 ; Peephole 216.c simplified clear (4 bytes)
2193 ; Peephole 219.a removed redundant clear
2214 ; Peephole 219.b removed redundant clear
2222 ; Peephole 220.a removed bogus DPS set
2230 ; Peephole 220.b removed bogus DPS set
2235 mov %1 + %2,(%2 + %1)
2237 ; Peephole 221.a remove redundant mov
2241 mov (%1 + %2 + %3),((%2 + %1) + %3)
2243 ; Peephole 221.b remove redundant mov
2250 ; Peephole 222 removed dec/inc pair
2261 ; Peephole 223.a removed redundant dph/dpl moves
2262 } if notVolatile %1 %2
2272 ; Peephole 223.b removed redundant dph/dpl moves
2288 ; Peephole 225 removed redundant move to acc
2300 ; Peephole 226.a removed unnecessary clr
2316 ; Peephole 226.b removed unnecessary clr
2335 ; Peephole 227.a replaced inefficient 32 bit clear
2362 ; Peephole 227.b replaced inefficient 32 bit constant
2385 ; Peephole 227.c replaced inefficient 16 bit clear
2402 ; Peephole 227.d replaced inefficient 16 bit constant
2411 // this last peephole often removes the last mov from 227.a - 227.d
2416 ; Peephole 227.e removed redundant mov to dptr
2423 ; Peephole 232 using movc to read xdata (--xram-movc)
2431 ; Peephole 233 using _gptrgetc instead of _gptrget (--xram-movc)
2441 ; Peephole 234.a loading dpl directly from a(ccumulator), r%1 not set
2454 ; Peephole 234.b loading dph directly from a(ccumulator), r%1 not set
2461 // 14 rules by Fiorenzo D. Ramaglia <fd.ramaglia@tin.it>
2466 ; Peephole 236.a used r%1 instead of ar%1
2473 ; Peephole 236.b used r%1 instead of ar%1
2480 ; Peephole 236.c used r%1 instead of ar%1
2487 ; Peephole 236.d used r%1 instead of ar%1
2494 ; Peephole 236.e used r%1 instead of ar%1
2501 ; Peephole 236.f used r%1 instead of ar%1
2508 ; Peephole 236.g used r%1 instead of ar%1
2515 ; Peephole 236.h used r%1 instead of ar%1
2522 ; Peephole 236.i used r%1 instead of ar%1
2529 ; Peephole 236.j used r%1 instead of ar%1
2536 ; Peephole 236.k used r%1 instead of ar%1
2543 ; Peephole 236.l used r%1 instead of ar%1
2550 ; Peephole 236.m used r%1 instead of ar%1
2557 ; Peephole 236.n used r%1 instead of ar%1
2568 ; Peephole 237.a removed sjmp to ret
2574 } if labelRefCountChange(%1 -1)
2585 ; Peephole 237.b removed sjmp to ret
2593 } if labelRefCountChange(%1 -1)
2595 // applies to f.e. device/lib/log10f.c
2621 ; Peephole 238.a removed 4 redundant moves
2622 } if operandsNotSame8(%1 %2 %3 %4 %5 %6 %7 %8), notVolatile(%1 %2 %3 %4 %9 %10 %11 %12)
2624 // applies to device/lib/log10f.c
2639 ; Peephole 238.b removed 3 redundant moves
2640 } if operandsNotSame7(%1 %2 %3 %4 %5 %6 %7), notVolatile(%1 %2 %3 %5 %6 %7)
2642 // applies to f.e. device/lib/time.c
2658 ; Peephole 238.c removed 2 redundant moves
2659 } if operandsNotSame4(%1 %2 %3 %4), notVolatile(%1 %2 %5 %6)
2661 // applies to f.e. support/regression/tests/bug-524209.c
2674 ; Peephole 238.d removed 3 redundant moves
2675 } if operandsNotSame6(%1 %2 %3 %4 %5 %6), notVolatile(%1 %2 %3 %4 %5 %6)
2677 // applies to f.e. ser_ir.asm
2681 ; Peephole 239 used a instead of acc
2689 ; Peephole 240 use clr instead of addc a,#0
2694 // peepholes 241.a to 241.d and 241.e to 241.h need to be in order
2706 ; Peephole 241.a optimized compare
2715 } if labelRefCountChange(%1 -1)
2717 // applies to generic pointer compare
2728 ; Peephole 241.b optimized compare
2736 } if labelRefCountChange(%1 -1)
2738 // applies to f.e. time.c
2748 ; Peephole 241.c optimized compare
2755 } if labelRefCountChange(%1 -1)
2757 // applies to f.e. malloc.c
2766 ; Peephole 241.d optimized compare
2772 } if labelRefCountChange(%1 -1)
2774 // applies to f.e. j = (k!=0x1000);
2775 // with volatile idata long k;
2790 ; Peephole 241.e optimized compare
2802 } if labelRefCountChange(%2 -1)
2804 // applies to f.e. j = (p!=NULL);
2805 // with volatile idata char *p;
2818 ; Peephole 241.f optimized compare
2828 } if labelRefCountChange(%2 -1)
2830 // applies to f.e. j = (k!=0x1000);
2831 // with volatile idata int k;
2842 ; Peephole 241.g optimized compare
2850 } if labelRefCountChange(%2 -1)
2852 // applies to f.e. vprintf.asm (--stack-auto)
2861 ; Peephole 241.h optimized compare
2867 } if labelRefCountChange(%2 -1)
2869 // applies to f.e. scott-bool1.c
2878 ; Peephole 242.a avoided branch jnz to jz
2881 } if labelRefCount %1 1
2883 // applies to f.e. scott-bool1.c
2894 ; Peephole 242.b avoided branch jnz to jz
2897 } if labelRefCount %1 1
2899 // applies to f.e. logic.c
2914 ; Peephole 242.c avoided branch jnz to jz
2917 } if labelRefCount %1 1
2919 // applies to f.e. vprintf.c
2920 // this is a rare case, usually the "tail increment" is noticed earlier
2927 ; Peephole 243 avoided branch to sjmp
2932 } if labelInRange(), labelRefCountChange(%3 -1), labelRefCountChange(%5 1)
2934 // applies to f.e. simplefloat.c (saving 1 cycle)
2939 ; Peephole 244.a moving first to a instead of r%1
2944 // applies to f.e. _itoa.c (saving 1 cycle)
2949 ; Peephole 244.b moving first to a instead of r%1
2955 // applies to f.e. bug-460010.c (saving 1 cycle)
2961 ; Peephole 244.c loading dpl from a instead of r%1
2970 ; Peephole 244.d loading dph from a instead of r%1
2974 // this one is safe but disables 245.a 245.b
2975 // please remove 245 if 245.a 245.b are found to be safe
2976 // applies to f.e. scott-compare.c
2987 ; Peephole 245 optimized complement (r%1 and acc set needed?)
2992 } if labelRefCount(%2 1), labelRefCountChange(%2 -1)
2994 // this one will not be triggered if 245 is present
2995 // please remove 245 if 245.a 245.b are found to be safe
2996 // applies to f.e. vprintf.c
3008 ; Peephole 245.a optimized conditional jump (r%1 and acc not set!)
3010 } if labelRefCount(%2 1), labelRefCountChange(%2 -1)
3012 // this one will not be triggered if 245 is present
3013 // please remove 245 if 245.a 245.b are found to be safe
3014 // applies to f.e. scott-compare.c
3026 ; Peephole 245.b optimized conditional jump (r%1 and acc not set!)
3028 } if labelRefCount(%2 1), labelRefCountChange(%2 -1)
3031 // rules 246.x apply to f.e. bitfields.c
3044 ; Peephole 246.a combined clr/clr
3061 ; Peephole 246.b combined set/set
3079 ; Peephole 246.c combined set/clr
3097 ; Peephole 246.d combined clr/set
3116 ; Peephole 246.e combined set/clr/clr
3136 ; Peephole 246.f combined set/clr/set
3156 ; Peephole 246.g combined clr/set/clr
3175 ; Peephole 246.h combined clr/set/set
3181 // rules 247.x apply to f.e. bitfields.c
3194 ; Peephole 247.a combined clr/clr
3211 ; Peephole 247.b combined set/set
3229 ; Peephole 247.c combined set/clr
3247 ; Peephole 247.d combined clr/set
3266 ; Peephole 247.e combined set/clr/clr
3286 ; Peephole 247.f combined set/clr/set
3306 ; Peephole 247.g combined clr/set/clr
3325 ; Peephole 247.h combined clr/set/set
3331 // Peepholes 248.x have to be compatible with the keyword volatile.
3332 // They optimize typical accesses to memory mapped I/O devices:
3333 // volatile xdata char t; t|=0x01;
3346 ; Peephole 248.a optimized or to xdata
3351 // volatile xdata char t; t&=0x01;
3364 ; Peephole 248.b optimized and to xdata
3369 // volatile xdata char t; t^=0x01;
3382 ; Peephole 248.c optimized xor to xdata
3387 // volatile xdata char t; t|=0x01; t&=~0x01; t|=0x01;
3409 ; Peephole 248.d optimized or/and/or to volatile xdata
3421 // volatile xdata char t; t&=~0x01; t|=0x01; t&=~0x01;
3443 ; Peephole 248.e optimized and/or/and to volatile xdata
3455 // volatile xdata char t; t|=0x01; t&=~0x01;
3471 ; Peephole 248.f optimized or/and to volatile xdata
3480 // volatile xdata char t; t&=~0x01; t|=0x01;
3496 ; Peephole 248.g optimized and/or to volatile xdata
3505 // volatile xdata char t; t^=0x01; t^=0x01;
3521 ; Peephole 248.h optimized xor/xor to volatile xdata
3530 // Peepholes 248.i to 248.m are like 248.d to 248.h except they apply to bitfields:
3531 // xdata struct { unsigned b0:1; unsigned b1:1; unsigned b2:1; } xport;
3532 // xport.b0=1; xport.b0=0; xport.b0=1;
3553 ; Peephole 248.i optimized or/and/or to xdata bitfield
3582 ; Peephole 248.j optimized and/or/and to xdata bitfield
3606 ; Peephole 248.k optimized or/and to xdata bitfield
3627 ; Peephole 248.l optimized and/or to xdata bitfield
3648 ; Peephole 248.m optimized xor/xor to xdata bitfield
3659 ; Peephole 249.a jump optimization
3660 } if labelRefCount(%1 1), labelRefCountChange(%1 -1)
3666 ; Peephole 249.b jump optimization
3667 } if labelRefCount(%1 1), labelRefCountChange(%1 -1)
3670 // This allows non-interrupt and interrupt code to safely compete
3671 // for a resource without the non-interrupt code having to disable
3673 // volatile bit resource_is_free;
3674 // if( resource_is_free ) {
3675 // resource_is_free=0; do_something; resource_is_free=1;
3682 ; Peephole 250.a using atomic test and clear
3686 } if labelRefCount(%3 0), labelRefCountChange(%3 1)
3694 ; Peephole 250.b using atomic test and clear
3698 } if labelRefCount %2 1
3701 // not before peephole 250.b
3705 ; Peephole 251.a replaced ljmp to ret with ret
3707 } if labelIsReturnOnly(), labelRefCountChange(%5 -1)
3709 // not before peephole 250.b
3713 ; Peephole 251.b replaced sjmp to ret with ret
3715 } if labelIsReturnOnly(), labelRefCountChange(%5 -1)
3717 // applies to shifts.c and when accessing arrays with an unsigned integer index
3718 // saving 1 byte, 2 cycles
3728 ; Peephole 252 optimized left shift
3737 // unsigned char i=8; do{ } while(--i != 0);
3738 // this applies if i is kept in a register
3743 ; Peephole 253.a optimized decrement with compare
3745 } if notVolatile(%1)
3747 // unsigned char i=8; do{ } while(--i != 0);
3748 // this applies if i is kept in data memory
3749 // must come before 256, see bug 1721024
3755 ; Peephole 253.b optimized decrement with compare
3757 } if notVolatile(%1), operandsNotRelated(%1 '@r0' '@r1')
3760 // applies to f.e. funptrs.c
3761 // saves one byte if %1 is a register or @register
3767 ; Peephole 254 optimized left shift
3771 // applies to f.e. switch.c
3784 ; Peephole 255 optimized jump table index calculation
3786 cjne a,#(%1+0x01),.+1
3795 // applies to f.e. jump tables and scott-bool1.c.
3796 // similar peepholes can be constructed for other instructions
3797 // after which a flag or a register is known (like: djnz, cjne, jnc)
3805 ; Peephole 256.a removed redundant clr c
3806 } if labelRefCount %2 0
3808 // applies to f.e. logf.c
3816 ; Peephole 256.b removed redundant clr a
3817 } if labelRefCount %2 0
3819 // applies to f.e. bug-905492.c
3827 ; Peephole 256.c loading %3 with zero from a
3829 } if labelRefCount %2 0
3831 // applies to f.e. malloc.c
3841 ; Peephole 256.d loading %3 with zero from a
3843 } if labelRefCount(%2 0),operandsNotRelated('a' %4)
3856 ; Peephole 256.e loading %3 with zero from a
3858 } if labelRefCount(%2 0),operandsNotRelated('a' %4 %6)
3873 ; Peephole 256.f loading %2 with zero from a
3875 } if labelRefCount(%2 0),operandsNotRelated('a' %4 %6 %8)
3878 // in_byte<<=1; if(in_bit) in_byte|=1;
3879 // helps f.e. reading data on a 3-wire (SPI) bus
3890 ; Peephole 258.a optimized bitbanging
3896 } if notVolatile(%1), labelRefCountChange(%3 -1)
3898 // in_byte<<=1; if(in_bit) in_byte|=1;
3909 ; Peephole 258.b optimized bitbanging
3915 } if labelRefCountChange(%3 -1)
3917 // in_byte>>=1; if(in_bit) in_byte|=0x80;
3929 ; Peephole 258.c optimized bitbanging
3935 } if notVolatile(%1), labelRefCountChange(%3 -1)
3937 // in_byte>>=1; if(in_bit) in_byte|=0x80;
3949 ; Peephole 258.d optimized bitbanging
3955 } if labelRefCountChange(%3 -1)
3957 // out_bit=out_byte&0x80; out_byte<<=1;
3958 // helps f.e. writing data on a 3-wire (SPI) bus
3968 ; Peephole 258.e optimized bitbanging
3974 // out_bit=out_byte&0x01; out_byte>>=1;
3985 ; Peephole 258.f optimized bitbanging
3992 // Peepholes 259.x rely on the correct labelRefCount. Otherwise they are
3993 // not compatible with peepholes 250.x
3994 // Peepholes 250.x add jumps to a previously unused label. If the
3995 // labelRefCount is not increased, peepholes 259.x are (mistakenly) applied.
3996 // (Mail on sdcc-devel 2004-10-25)
3998 // applies to f.e. vprintf.c
4005 ; Peephole 259.a removed redundant label %2 and ret
4007 } if labelRefCount %2 0
4009 // applies to f.e. gets.c
4016 ; Peephole 259.b removed redundant label %2 and ret
4018 } if labelRefCount %2 0
4020 // optimizing jumptables
4021 // Please note: to enable peephole 260.x you currently have to set
4022 // the environment variable SDCC_SJMP_JUMPTABLE
4034 ; Peephole 260.a used sjmp in jumptable
4045 // optimizing jumptables
4058 ; Peephole 260.b used sjmp in jumptable
4070 // optimizing jumptables
4084 ; Peephole 260.c used sjmp in jumptable
4097 // optimizing jumptables
4112 ; Peephole 260.d used sjmp in jumptable
4126 // optimizing jumptables
4142 ; Peephole 260.e used sjmp in jumptable
4157 // optimizing jumptables
4175 ; Peephole 260.f used sjmp in jumptable
4192 // optimizing jumptables
4211 ; Peephole 260.g used sjmp in jumptable
4229 // optimizing jumptables
4249 ; Peephole 260.h used sjmp in jumptable
4268 // optimizing jumptables
4289 ; Peephole 260.i used sjmp in jumptable
4309 // optimizing jumptables
4331 ; Peephole 260.j used sjmp in jumptable
4352 // optimizing jumptables
4375 ; Peephole 260.k used sjmp in jumptable
4397 // optimizing jumptables
4421 ; Peephole 260.l used sjmp in jumptable
4444 // optimizing jumptables
4469 ; Peephole 260.m used sjmp in jumptable
4493 // applies to: a = (a << 1) | (a >> 15);
4507 ; Peephole 261.a optimized left rol
4515 // applies to: a = (a << 15) | (a >> 1);
4529 ; Peephole 261.b optimized right rol
4541 ; Peephole 262 removed redundant cpl c
4550 ; Peephole 263.a optimized loading const
4552 } if notVolatile(%1)
4559 ; Peephole 263.b optimized loading const
4561 } if notVolatile(%1)
4567 ; Peephole 263.c optimized loading const
4569 } if notVolatile(%1)
4579 ; Peephole 264 jump optimization (acc not set)
4582 } if labelRefCount(%3 1), labelRefCountChange(%3 -1)
4589 ; Peephole 265 optimized mov/cpl sequence (carry differs)
4592 } if notVolatile(%1)
4598 ; Peephole 266.a optimized mov/jump sequence
4601 } if notVolatile(%1)
4607 ; Peephole 266.b optimized mov/jump sequence
4610 } if notVolatile(%1)
4620 ; Peephole 267.a optimized mov bit sequence
4624 } if labelRefCount(%1 1), labelRefCountChange(%1 -1), labelRefCountChange(%3 -1)
4634 ; Peephole 267.b optimized mov bit sequence
4638 } if labelRefCount(%1 1), labelRefCountChange(%1 -1), labelRefCountChange(%3 -1)
4644 ; Peephole 268 removed redundant mov
4646 } if notVolatile(%1)
4652 ; Peephole 269 removed redundant mov
4654 } if notVolatile(%1)
4656 //accessing struct/array on stack
4661 ; Peephole 270 removed redundant add
4667 ; Peephole 300 pop ar%1 removed
4673 ; Peephole 301 mov r%1,%2 removed
4674 } if notVolatile(%2), deadMove(%1)
4677 // applies to: void test( char c ) { if( c ) func1(); else func2(); }
4682 ; Peephole 400.a replaced lcall/ret with ljmp
4686 // applies to: void test( char c ) { if( c ) func1(); else func2(); }
4692 ; Peephole 400.b replaced lcall/ret with ljmp
4695 } if labelRefCount %2 0
4697 // applies to f.e. scott-bool1.c
4703 ; Peephole 400.c replaced lcall with ljmp
4709 // for programs less than 2k
4713 ; Peephole 400.d replaced lcall with acall
4717 // for programs less than 2k
4721 ; Peephole 400.e replaced ljmp with ajmp
4726 // should be one of the last peepholes
4730 ; Peephole 500 removed redundant label %1
4731 } if labelRefCount(%1 0)