5 // ; Peephole 1 removed pop %1 push %1 (not push pop)
13 // ; Peephole 2 removed pop %1 push %1 (not push pop)
18 // added by Jean Louis VERN for
24 ; Peephole 2.a removed redundant xch xch
32 ; Peephole 3.a changed mov to clr
42 ; Peephole 3.b changed mov to clr
48 // saving 1 byte, losing 1 cycle but maybe allowing peephole 3.b to start
53 ; Peephole 3.c changed mov to clr
69 ; Peephole 3.d removed redundant clr
83 ; Peephole 3.e removed redundant clr
99 ; Peephole 3.f removed redundant clr
117 ; Peephole 3.g removed redundant clr
129 ; Peephole 3.h changed mov %3,#0x00 to %3,a
144 ; Peephole 3.i changed mov %4,#0x00 to %4,a
161 ; Peephole 3.j changed mov %5,#0x00 to %5,a
180 ; Peephole 3.k changed mov %6,#0x00 to %6,a
190 ; Peephole 100 removed redundant mov
203 ; Peephole 101 removed redundant mov
221 ; Peephole 102 removed redundant mov
243 ; Peephole 103 removed redundant mov
260 ; Peephole 104 optimized increment (acc not set to r%1, flags undefined)
269 ; Peephole 105 removed redundant mov
278 ; Peephole 106 removed redundant mov
287 ; Peephole 107 removed redundant ljmp
289 } labelRefCountChange(%1 -1)
296 ; Peephole 108 removed ljmp by inverse jump logic
299 } if labelInRange(), labelRefCountChange(%1 -1)
306 ; Peephole 109 removed ljmp by inverse jump logic
309 } if labelInRange(), labelRefCountChange(%1 -1)
316 ; Peephole 110 removed ljmp by inverse jump logic
319 } if labelInRange(), labelRefCountChange(%1 -1)
326 ; Peephole 111 removed ljmp by inverse jump logic
329 } if labelInRange(), labelRefCountChange(%2 -1)
336 ; Peephole 112.a removed ljmp by inverse jump logic
339 } if labelInRange(), labelRefCountChange(%2 -1)
345 ; Peephole 112.b changed ljmp to sjmp
358 ; Peephole 113 optimized misc sequence
363 } if labelRefCount %3 1
374 ; Peephole 114 optimized misc sequence
380 } if labelRefCount %3 2
389 ; Peephole 115.a jump optimization (acc not set)
393 } if labelRefCount %3 1
401 ; Peephole 115.b jump optimization
405 } labelRefCountChange(%2 -1)
415 ; Peephole 116 jump optimization (acc not set)
420 } if labelRefCount %3 2
431 ; Peephole 117 jump optimization (acc not set)
437 } if labelRefCount %3 3
449 ; Peephole 118 jump optimization (acc not set)
456 } if labelRefCount %3 4
465 ; Peephole 119 jump optimization (acc not set)
468 } if labelRefCount(%3 1), labelRefCountChange(%3 -1)
478 ; Peephole 120 jump optimization (acc not set)
482 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%4 1)
493 ; Peephole 121 jump optimization (acc not set)
498 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%4 2)
510 ; Peephole 122 jump optimization (acc not set)
516 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%4 3)
525 ; Peephole 123 jump optimization (acc not set)
529 } if labelRefCount %3 1
539 ; Peephole 124 jump optimization (acc not set)
544 } if labelRefCount %3 2
555 ; Peephole 125 jump optimization (acc not set)
561 } if labelRefCount %3 3
573 ; Peephole 126 jump optimization (acc not set)
580 } if labelRefCount %3 4
593 ; Peephole 127 removed misc sequence
595 } if labelRefCount %3 0
602 ; Peephole 128 jump optimization
606 // applies to: bug-524691.c --model-large: while (uRight - uLeft > 1)
612 ; Peephole 129.a jump optimization
616 // applies to: _fsdiv.c --xstack: if (mant1 < mant2)
623 ; Peephole 129.b optimized condition
628 // applies to: time.c --xstack: while((days += (LEAP_YEAR(year) ? 366 : 365)) <= epoch)
636 ; Peephole 129.c optimized condition
640 } if notVolatile %1 %2
642 // applies to: _memmove.c --xstack: if (((int)src < (int)dst) && ((((int)src)+acount) > (int)dst))
651 ; Peephole 129.d optimized condition
656 } if notVolatile %1 %2 %3
661 ; Peephole 130 changed target address mode r%1 to ar%1
671 ; Peephole 131 optimized decrement (not caring for c)
676 // ideally the optimizations of rules 132.x should be done in genCmpXX
683 ; Peephole 132.a optimized genCmpGt by inverse logic (acc differs)
695 ; Peephole 132.b optimized genCmpGt by inverse logic (acc differs)
707 ; Peephole 132.c optimized genCmpGt by inverse logic (acc differs)
719 ; Peephole 132.d optimized genCmpGt by inverse logic
723 } if operandsNotRelated('0x00' %2)
731 ; Peephole 132.e optimized genCmpLt by inverse logic (carry differs)
735 } if operandsNotRelated('0x00' %2)
743 ; Peephole 132.f optimized genCmpLt by inverse logic (carry differs)
747 } if operandsNotRelated('0x00' %2)
757 ; Peephole 133 removed redundant moves
770 ; Peephole 134 removed redundant moves
781 ; Peephole 135 removed redundant mov
792 ; Peephole 136 removed redundant moves
798 // WTF? Doesn't look sensible to me...
808 // ; Peephole 137 optimized misc jump sequence
812 //} if labelRefCount %4 1
823 // ; Peephole 138 optimized misc jump sequence
828 //} if labelRefCount %4 1
835 ; Peephole 139 removed redundant mov
845 ; Peephole 140 removed redundant mov
854 ; Peephole 141 removed redundant mov
864 ; Peephole 142 removed redundant moves
873 ; Peephole 143 converted rlc to rl
881 ; Peephole 144 converted rrc to rr
889 ; Peephole 145 changed to add without carry
898 ; Peephole 146 changed to add without carry
906 ; Peephole 147 changed target address mode r%1 to ar%1
913 ; Peephole 148 changed target address mode r%1 to ar%1
920 ; Peephole 149 changed target address mode r%1 to ar%1
930 ; Peephole 150 removed misc moves via dpl before return
943 ; Peephole 151 removed misc moves via dph, dpl before return
955 ; Peephole 152 removed misc moves via dph, dpl before return
970 ; Peephole 153 removed misc moves via dph, dpl, b before return
983 ; Peephole 154 removed misc moves via dph, dpl, b before return
997 ; Peephole 155 removed misc moves via dph, dpl, b before return
1014 ; Peephole 156 removed misc moves via dph, dpl, b, a before return
1029 ; Peephole 157 removed misc moves via dph, dpl, b, a before return
1043 ; Peephole 158 removed misc moves via dph, dpl, b, a before return
1052 ; Peephole 159 avoided xrl during execution
1061 ; Peephole 160 removed sjmp by inverse jump logic
1064 } labelRefCountChange(%1 -1)
1071 ; Peephole 161 removed sjmp by inverse jump logic
1074 } labelRefCountChange(%1 -1)
1081 ; Peephole 162 removed sjmp by inverse jump logic
1084 } labelRefCountChange(%1 -1)
1091 ; Peephole 163 removed sjmp by inverse jump logic
1094 } labelRefCountChange(%1 -1)
1101 ; Peephole 164 removed sjmp by inverse jump logic
1104 } labelRefCountChange(%1 -1)
1111 ; Peephole 165 removed sjmp by inverse jump logic
1114 } labelRefCountChange(%1 -1)
1121 ; Peephole 166 removed redundant mov
1124 } if notVolatile %1 %2
1131 ; Peephole 167 removed redundant bit moves (c not set to %1)
1140 ; Peephole 168 jump optimization
1143 } labelRefCountChange(%2 -1)
1150 ; Peephole 169 jump optimization
1153 } labelRefCountChange(%2 -1)
1162 ; Peephole 170 jump optimization
1165 } if labelRefCount(%3 1), labelRefCountChange(%3 -1)
1175 ; Peephole 171 jump optimization
1179 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%4 1)
1190 ; Peephole 172 jump optimization
1195 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%4 2)
1207 ; Peephole 173 jump optimization
1213 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%4 3)
1222 ; Peephole 174 optimized decrement (acc not set to %2, flags undefined)
1233 ; Peephole 175 optimized increment (acc not set to %2, flags undefined)
1243 ; Peephole 176 optimized increment, removed redundant mov
1248 // this one will screw assigns to volatile/sfr's
1253 ; Peephole 177.a removed redundant mov
1255 } if notVolatile %1 %2
1257 // applies to f.e. scott-add.asm (--model-large)
1262 ; Peephole 177.b removed redundant mov
1266 // applies to f.e. bug-408972.c
1271 ; Peephole 177.c removed redundant move
1273 } if notVolatile %1 %2
1275 // applies to f.e. bug-408972.c
1276 // not before peephole 177.c
1282 ; Peephole 177.d removed redundant move
1285 } if notVolatile(%1 %2),operandsNotRelated(%1 %3)
1287 // applies to f.e. bug-607243.c
1288 // also check notVolatile %3, as it will return FALSE if it's @r%1
1293 ; peephole 177.e removed redundant move
1295 } if notVolatile %2 %3
1301 ; peephole 177.f removed redundant move
1309 ; peephole 177.g optimized mov sequence
1318 ; peephole 177.h optimized mov sequence
1328 ; Peephole 178 removed redundant mov
1333 // rules 179-182 provided by : Frieder <fe@lake.iup.uni-heidelberg.de>
1334 // saving 2 byte, 1 cycle
1339 ; Peephole 179 changed mov to clr
1345 // volatile xdata char t; t=0x01; t=0x03;
1352 ; Peephole 180.a removed redundant mov to dptr
1358 // volatile xdata char t; t=0x01; t=0x03; t=0x01;
1367 ; Peephole 180.b removed redundant mov to dptr
1375 // saving 1 byte, 0 cycles
1379 ; Peephole 181 changed mov to clr
1383 // saving 3 bytes, 2 cycles
1384 // provided by Bernhard Held <bernhard.held@de.westinghouse.com>
1389 ; Peephole 182.a used 16 bit load of DPTR
1393 // saving 3 byte, 2 cycles, return(NULL) profits here
1398 ; Peephole 182.b used 16 bit load of dptr
1402 // saving 3 byte, 2 cycles. Probably obsoleted by 182.b
1407 ; Peephole 182.c used 16 bit load of dptr
1408 mov dptr,#(((%2)<<8) + %1)
1411 // applies to return 0.0; in f.e. sincosf.c
1417 ; Peephole 182.d used 16 bit load of dptr
1418 mov dptr,#(%1&0x00ff)
1426 ; Peephole 183 avoided anl during execution
1435 ; Peephole 184 removed redundant mov
1441 // acc being incremented might cause problems
1445 ; Peephole 185 changed order of increment (acc incremented also!)
1470 ; Peephole 186.a optimized movc sequence
1503 ; Peephole 186.b optimized movc sequence
1528 ; Peephole 186.c optimized movc sequence
1537 // char indexed access to: char code table[] = {4,3,2,1};
1547 ; Peephole 186.d optimized movc sequence
1552 // char indexed access to: int code table[] = {4,3,2,1};
1567 ; Peephole 186.e optimized movc sequence (b, dptr differ)
1585 ; Peephole 187 used a instead of ar%1 for anl
1597 ; Peephole 188 removed redundant mov
1609 ; Peephole 189 removed redundant mov and anl
1614 // rules 190 & 191 need to be in order
1620 ; Peephole 190 removed redundant mov
1632 ; Peephole 191 removed redundant mov
1643 ; Peephole 192 used a instead of ar%1 as source
1660 ; Peephole 193.a optimized misc jump sequence
1670 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1684 ; Peephole 193.b optimized misc jump sequence
1694 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1708 ; Peephole 193.c optimized misc jump sequence
1718 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1729 ; Peephole 194 optimized misc jump sequence
1736 } if labelRefCount(%3 4), labelRefCountChange(%3 -4), labelRefCountChange(%8 3)
1748 ; Peephole 195.a optimized misc jump sequence
1756 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1768 ; Peephole 195.b optimized misc jump sequence
1776 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1788 ; Peephole 195.c optimized misc jump sequence
1796 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1806 ; Peephole 196 optimized misc jump sequence
1812 } if labelRefCount(%3 3), labelRefCountChange(%3 -3), labelRefCountChange(%8 2)
1822 ; Peephole 197.a optimized misc jump sequence
1828 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1838 ; Peephole 197.b optimized misc jump sequence
1844 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1854 ; Peephole 197.c optimized misc jump sequence
1860 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1869 ; Peephole 198 optimized misc jump sequence
1874 } if labelRefCount(%3 2), labelRefCountChange(%3 -2), labelRefCountChange(%8 1)
1882 ; Peephole 199 optimized misc jump sequence
1886 } if labelRefCount(%3 1), labelRefCountChange(%3 -1)
1892 ; Peephole 200 removed redundant sjmp
1894 } labelRefCountChange(%1 -1)
1901 ; Peephole 201 removed redundant sjmp
1904 } labelRefCountChange(%1 -1)
1911 ; Peephole 202 removed redundant push pop
1920 ; Peephole 203 removed mov r%1,_spx
1929 ; Peephole 204 removed redundant mov
1941 ; Peephole 205 optimized misc jump sequence
1945 } if labelRefCount(%2 1), labelRefCountChange(%2 -1), labelRefCountChange(%3 -1)
1950 ; Peephole 206 removed redundant mov %1,%1
1958 ; Peephole 207 removed zero add (acc not set to %1, flags undefined)
1967 ; Peephole 208 removed redundant push pop
1977 ; Peephole 209 optimized increment (acc not set to %1, flags undefined)
1983 mov dptr,#((((%1 >> 8)) <<8) + %1)
1985 ; Peephole 210 simplified expression
1993 ; Peephole 211 removed redundant push %1 pop %1
2001 ; Peephole 212 reduced add sequence to inc
2007 mov %1,#(( %2 >> 8 ) ^ 0x80)
2009 ; Peephole 213.a inserted fix
2015 mov %1,#(( %2 + %3 >> 8 ) ^ 0x80)
2017 ; Peephole 213.b inserted fix
2018 mov %1,#((%2 + %3) >> 8)
2028 ; Peephole 214 reduced some extra moves
2031 } if operandsNotSame
2038 ; Peephole 215 removed some moves
2041 } if operandsNotSame
2051 ; Peephole 216 simplified clear (2bytes)
2070 ; Peephole 217 simplified clear (3bytes)
2094 ; Peephole 218 simplified clear (4bytes)
2113 ; Peephole 219 removed redundant clear
2129 ; Peephole 219.a removed redundant clear
2142 ; Peephole 220.a removed bogus DPS set
2150 ; Peephole 220.b removed bogus DPS set
2155 mov %1 + %2,(%2 + %1)
2157 ; Peephole 221.a remove redundant move
2161 mov (%1 + %2 + %3),((%2 + %1) + %3)
2163 ; Peephole 221.b remove redundant move
2170 ; Peephole 222 removed dec/inc pair
2179 ; Peephole 223 removed redundant dph/dpl moves
2182 } if notVolatile %1 %2
2190 ; Peephole 224 removed redundant dph/dpl moves
2203 ; Peephole 225 removed redundant move to acc
2219 ; Peephole 226 removed unnecessary clr
2241 ; Peephole 227 replaced inefficient 32 bit clear
2269 ; Peephole 228 replaced inefficient 32 bit constant
2291 ; Peephole 229 replaced inefficient 16 bit clear
2309 ; Peephole 230 replaced inefficient 16 bit constant
2319 // this last peephole often removes the last mov from 227-230
2324 ; Peephole 231 removed redundant mov to dptr
2331 ; Peephole 232 using movc to read xdata (--xram-movc)
2339 ; Peephole 233 using _gptrgetc instead of _gptrget (--xram-movc)
2349 ; Peephole 234 loading dpl directly from a(ccumulator), r%1 not set
2362 ; Peephole 235 loading dph directly from a(ccumulator), r%1 not set
2369 // 14 rules by Fiorenzo D. Ramaglia <fd.ramaglia@tin.it>
2374 ; Peephole 236.a used r%1 instead of ar%1
2381 ; Peephole 236.b used r%1 instead of ar%1
2388 ; Peephole 236.c used r%1 instead of ar%1
2395 ; Peephole 236.d used r%1 instead of ar%1
2402 ; Peephole 236.e used r%1 instead of ar%1
2409 ; Peephole 236.f used r%1 instead of ar%1
2416 ; Peephole 236.g used r%1 instead of ar%1
2423 ; Peephole 236.h used r%1 instead of ar%1
2430 ; Peephole 236.i used r%1 instead of ar%1
2437 ; Peephole 236.j used r%1 instead of ar%1
2444 ; Peephole 236.k used r%1 instead of ar%1
2451 ; Peephole 236.l used r%1 instead of ar%1
2458 ; Peephole 236.m used r%1 instead of ar%1
2465 ; Peephole 236.n used r%1 instead of ar%1
2476 ; Peephole 237.a removed sjmp to ret
2482 } labelRefCountChange(%1 -1)
2493 ; Peephole 237.b removed sjmp to ret
2501 } labelRefCountChange(%1 -1)
2503 // applies to f.e. device/lib/log10f.c
2529 ; Peephole 238.a removed 4 redundant moves
2530 } if operandsNotSame8 %1 %2 %3 %4 %5 %6 %7 %8
2532 // applies to device/lib/log10f.c
2547 ; Peephole 238.b removed 3 redundant moves
2548 } if operandsNotSame7 %1 %2 %3 %4 %5 %6 %7
2550 // applies to f.e. device/lib/time.c
2566 ; Peephole 238.c removed 2 redundant moves
2567 } if operandsNotSame4 %1 %2 %3 %4
2569 // applies to f.e. support/regression/tests/bug-524209.c
2582 ; Peephole 238.d removed 3 redundant moves
2583 } if operandsNotSame6 %1 %2 %3 %4 %5 %6
2585 // applies to f.e. ser_ir.asm
2589 ; Peephole 239 used a instead of acc
2597 ; Peephole 240 use clr instead of addc a,#0
2602 // peepholes 241.a to 241.c and 241.d to 241.f need to be in order
2614 ; Peephole 241.a optimized compare
2623 } labelRefCountChange(%10 -1)
2625 // applies to f.e. time.c
2635 ; Peephole 241.b optimized compare
2642 } labelRefCountChange(%6 -1)
2644 // applies to f.e. malloc.c
2653 ; Peephole 241.c optimized compare
2659 } labelRefCountChange(%4 -1)
2661 // applies to f.e. j = (k!=0x1000);
2662 // with volatile idata long k;
2677 ; Peephole 241.d optimized compare
2689 } labelRefCountChange(%7 -1)
2691 // applies to f.e. j = (k!=0x1000);
2692 // with volatile idata int k;
2703 ; Peephole 241.e optimized compare
2711 } labelRefCountChange(%7 -1)
2713 // applies to f.e. vprintf.asm (--stack-auto)
2722 ; Peephole 241.f optimized compare
2728 } labelRefCountChange(%7 -1)
2730 // applies to f.e. scott-bool1.c
2737 ; Peephole 242.a avoided branch jnz to jz
2742 } if labelRefCount %1 1
2744 // applies to f.e. scott-bool1.c
2752 ; Peephole 242.b avoided branch jnz to jz
2758 } if labelRefCount %1 1
2760 // applies to f.e. logic.c
2770 ; Peephole 242.c avoided branch jnz to jz
2778 } if labelRefCount %1 1
2780 // applies to f.e. vprintf.c
2781 // this is a rare case, usually the "tail increment" is noticed earlier
2788 ; Peephole 243 avoided branch to sjmp
2793 } if labelInRange(), labelRefCountChange(%3 -1), labelRefCountChange(%5 1)
2795 // applies to f.e. simplefloat.c (saving 1 cycle)
2800 ; Peephole 244.a moving first to a instead of r%1
2805 // applies to f.e. _itoa.c (saving 1 cycle)
2810 ; Peephole 244.b moving first to a instead of r%1
2816 // applies to f.e. bug-460010.c (saving 1 cycle)
2821 ; Peephole 244.c loading dpl from a instead of r%1
2830 ; Peephole 244.d loading dph from a instead of r%1
2835 // this one is safe but disables 245.a 245.b
2836 // please remove 245 if 245.a 245.b are found to be safe
2837 // applies to f.e. scott-compare.c
2848 ; Peephole 245 optimized complement (r%1 and acc set needed?)
2853 } if labelRefCount(%2 1), labelRefCountChange(%2 -1)
2855 // this one will not be triggered if 245 is present
2856 // please remove 245 if 245.a 245.b are found to be safe
2857 // applies to f.e. vprintf.c
2869 ; Peephole 245.a optimized conditional jump (r%1 and acc not set!)
2871 } if labelRefCount(%2 1), labelRefCountChange(%2 -1)
2873 // this one will not be triggered if 245 is present
2874 // please remove 245 if 245.a 245.b are found to be safe
2875 // applies to f.e. scott-compare.c
2887 ; Peephole 245.b optimized conditional jump (r%1 and acc not set!)
2889 } if labelRefCount(%2 1), labelRefCountChange(%2 -1)
2892 // rules 246.x apply to f.e. bitfields.c
2903 ; Peephole 246.a combined clr/clr
2920 ; Peephole 246.b combined set/set
2937 ; Peephole 246.c combined set/clr
2955 ; Peephole 246.d combined clr/set
2974 ; Peephole 246.e combined set/clr/clr
2993 ; Peephole 246.f combined set/clr/set
3013 ; Peephole 246.g combined clr/set/clr
3033 ; Peephole 246.h combined clr/set/set
3044 // rules 247.x apply to f.e. bitfields.c
3055 ; Peephole 247.a combined clr/clr
3072 ; Peephole 247.b combined set/set
3089 ; Peephole 247.c combined set/clr
3107 ; Peephole 247.d combined clr/set
3126 ; Peephole 247.e combined set/clr/clr
3145 ; Peephole 247.f combined set/clr/set
3165 ; Peephole 247.g combined clr/set/clr
3185 ; Peephole 247.h combined clr/set/set
3194 // Peepholes 248.x have to be compatible with the keyword volatile.
3195 // They optimize typical accesses to memory mapped I/O devices:
3196 // volatile xdata char t; t|=0x01;
3206 ; Peephole 248.a optimized or to xdata
3214 // volatile xdata char t; t&=0x01;
3224 ; Peephole 248.b optimized and to xdata
3232 // volatile xdata char t; t^=0x01;
3242 ; Peephole 248.c optimized xor to xdata
3250 // volatile xdata char t; t|=0x01; t&=~0x01; t|=0x01;
3270 ; Peephole 248.d optimized or/and/or to volatile xdata
3284 // volatile xdata char t; t&=~0x01; t|=0x01; t&=~0x01;
3304 ; Peephole 248.e optimized and/or/and to volatile xdata
3318 // volatile xdata char t; t|=0x01; t&=~0x01;
3332 ; Peephole 248.f optimized or/and to volatile xdata
3343 // volatile xdata char t; t&=~0x01; t|=0x01;
3357 ; Peephole 248.g optimized and/or to volatile xdata
3368 // volatile xdata char t; t^=0x01; t^=0x01;
3382 ; Peephole 248.h optimized xor/xor to volatile xdata
3393 // Peepholes 248.i to 248.m are like 248.d to 248.h except they apply to bitfields:
3394 // xdata struct { unsigned b0:1; unsigned b1:1; unsigned b2:1; } xport;
3395 // xport.b0=1; xport.b0=0; xport.b0=1;
3412 ; Peephole 248.i optimized or/and/or to xdata bitfield
3441 ; Peephole 248.j optimized and/or/and to xdata bitfield
3465 ; Peephole 248.k optimized or/and to xdata bitfield
3486 ; Peephole 248.l optimized and/or to xdata bitfield
3507 ; Peephole 248.m optimized xor/xor to xdata bitfield
3522 ; Peephole 249.a jump optimization
3523 } if labelRefCount(%1 1), labelRefCountChange(%1 -1)
3529 ; Peephole 249.b jump optimization
3530 } if labelRefCount(%1 1), labelRefCountChange(%1 -1)
3533 // This allows non-interrupt and interrupt code to safely compete
3534 // for a resource without the non-interrupt code having to disable
3536 // volatile bit resource_is_free;
3537 // if( resource_is_free ) {
3538 // resource_is_free=0; do_something; resource_is_free=1;
3545 ; Peephole 250.a using atomic test and clear
3549 } if labelRefCount(%3 0), labelRefCountChange(%3 1)
3557 ; Peephole 250.b using atomic test and clear
3561 } if labelRefCount %2 1
3564 // not before peephole 250.b
3568 ; Peephole 251.a replaced ljmp to ret with ret
3570 } if labelIsReturnOnly(), labelRefCountChange(%5 -1)
3572 // not before peephole 250.b
3576 ; Peephole 251.b replaced sjmp to ret with ret
3578 } if labelIsReturnOnly(), labelRefCountChange(%5 -1)
3580 // applies to shifts.c and when accessing arrays with an unsigned integer index
3581 // saving 1 byte, 2 cycles
3591 ; Peephole 252 optimized left shift
3600 // applies to: void test( char c ) { if( c ) func1(); else func2(); }
3605 ; Peephole 253.a replaced lcall/ret with ljmp
3609 // applies to: void test( char c ) { if( c ) func1(); else func2(); }
3615 ; Peephole 253.b replaced lcall/ret with ljmp
3618 } if labelRefCount %2 0
3620 // applies to f.e. scott-bool1.c
3626 ; Peephole 253.c replaced lcall with ljmp
3633 // applies to f.e. funptrs.c
3634 // saves one byte if %1 is a register or @register
3639 ; Peephole 254 optimized left shift
3644 // applies to f.e. switch.c
3657 ; Peephole 255 optimized jump table index calculation
3659 cjne a,#(%1+0x01),.+1
3668 // applies to f.e. jump tables and scott-bool1.c.
3669 // similar peepholes can be constructed for other instructions
3670 // after which a flag or a register is known (like: djnz, cjne, jnc)
3676 ; Peephole 256.a removed redundant clr c
3679 } if labelRefCount %2 0
3681 // applies to f.e. logf.c
3687 ; Peephole 256.b removed redundant clr a
3690 } if labelRefCount %2 0
3692 // applies to f.e. bug-905492.c
3698 ; Peephole 256.c loading %3 with zero from a
3702 } if labelRefCount %2 0
3704 // applies to f.e. malloc.c
3711 ; Peephole 256.d loading %3 with zero from a
3716 } if labelRefCount(%2 0),operandsNotRelated('a' %4)
3725 ; Peephole 256.e loading %3 with zero from a
3731 } if labelRefCount(%2 0),operandsNotRelated('a' %4 %6)
3741 ; Peephole 256.f loading %2 with zero from a
3748 } if labelRefCount(%2 0),operandsNotRelated('a' %4 %6 %8)
3751 // unsigned char i=8; do{ } while(--i != 0);
3752 // this currently only applies if i is kept in a register
3757 ; Peephole 257 optimized decrement with compare
3762 // in_byte<<=1; if(in_bit) in_byte|=1;
3763 // helps f.e. reading data on a 3-wire (SPI) bus
3773 ; Peephole 258.a optimized bitbanging
3780 } if notVolatile(%1), labelRefCountChange(%3 -1)
3782 // in_byte<<=1; if(in_bit) in_byte|=1;
3792 ; Peephole 258.b optimized bitbanging
3799 } labelRefCountChange(%3 -1)
3801 // in_byte>>=1; if(in_bit) in_byte|=0x80;
3812 ; Peephole 258.c optimized bitbanging
3819 } if notVolatile(%1), labelRefCountChange(%3 -1)
3821 // in_byte>>=1; if(in_bit) in_byte|=0x80;
3832 ; Peephole 258.d optimized bitbanging
3839 } labelRefCountChange(%3 -1)
3841 // out_bit=out_byte&0x80; out_byte<<=1;
3842 // helps f.e. writing data on a 3-wire (SPI) bus
3851 ; Peephole 258.e optimized bitbanging
3858 // out_bit=out_byte&0x01; out_byte>>=1;
3868 ; Peephole 258.f optimized bitbanging
3876 // Peepholes 259.x rely on the correct labelRefCount. Otherwise they are
3877 // not compatible with peepholes 250.x
3878 // Peepholes 250.x add jumps to a previously unused label. If the
3879 // labelRefCount is not increased, peepholes 259.x are (mistakenly) applied.
3880 // (Mail on sdcc-devel 2004-10-25)
3882 // applies to f.e. vprintf.c
3889 ; Peephole 259.a removed redundant label %2 and ret
3891 } if labelRefCount %2 0
3893 // applies to f.e. gets.c
3900 ; Peephole 259.b removed redundant label %2 and ret
3902 } if labelRefCount %2 0
3904 // optimizing jumptables
3905 // Please note: to enable peephole 260.x you currently have to set
3906 // the environment variable SDCC_SJMP_JUMPTABLE
3918 ; Peephole 260.a used sjmp in jumptable
3929 // optimizing jumptables
3942 ; Peephole 260.b used sjmp in jumptable
3954 // optimizing jumptables
3968 ; Peephole 260.c used sjmp in jumptable
3981 // optimizing jumptables
3996 ; Peephole 260.d used sjmp in jumptable
4010 // optimizing jumptables
4026 ; Peephole 260.e used sjmp in jumptable
4041 // optimizing jumptables
4059 ; Peephole 260.f used sjmp in jumptable
4076 // optimizing jumptables
4095 ; Peephole 260.g used sjmp in jumptable
4113 // optimizing jumptables
4133 ; Peephole 260.h used sjmp in jumptable
4152 // optimizing jumptables
4173 ; Peephole 260.i used sjmp in jumptable
4193 // optimizing jumptables
4215 ; Peephole 260.j used sjmp in jumptable
4236 // optimizing jumptables
4259 ; Peephole 260.k used sjmp in jumptable
4281 // optimizing jumptables
4305 ; Peephole 260.l used sjmp in jumptable
4328 // optimizing jumptables
4353 ; Peephole 260.m used sjmp in jumptable
4377 // applies to: a = (a << 1) | (a >> 15);
4389 ; Peephole 261.a optimized left rol
4399 // applies to: a = (a << 15) | (a >> 1);
4411 ; Peephole 261.b optimized right rol
4421 // should be one of the last peepholes
4425 ; Peephole 300 removed redundant label %1
4426 } if labelRefCount(%1 0)