1 /** test one byte mul/div/mod operations.
/* NOTE(review): fragment of the one-byte MULTIPLY test. The enclosing
 * function header and the declarations of cL/cR are outside this chunk;
 * cL/cR are presumably "{attrL}/{attrR} signed char" -- confirm against
 * the full template. {attrL}/{attrR} look like generator placeholders
 * for storage-class/attribute qualifiers substituted per test variant. */
12 {attrL} unsigned char ucL;
14 {attrR} unsigned char ucR;
15 volatile char r8 , r8b;
16 volatile unsigned char ur8, ur8b;
17 volatile short r16, r16b;
/* char * char: both operands promote to int (C integer promotions), so
 * the full product is exact; each case checks both operand orders and
 * stores into a 16-bit result. */
19 cL = -127; cR = -5; r16 = cL * cR; r16b = cR * cL; ASSERT(r16 == 635); ASSERT(r16b == 635);
20 cL = 127; cR = -5; r16 = cL * cR; r16b = cR * cL; ASSERT(r16 == -635); ASSERT(r16b == -635);
21 cL = -127; cR = 5; r16 = cL * cR; r16b = cR * cL; ASSERT(r16 == -635); ASSERT(r16b == -635);
22 cL = 127; cR = 5; r16 = cL * cR; r16b = cR * cL; ASSERT(r16 == 635); ASSERT(r16b == 635);
/* 8-bit results: the int product is converted back to (unsigned) char.
 * NOTE(review): "cL = 128" into a signed char is implementation-defined
 * (expected to wrap to -128 on the target ABI) -- deliberate here, as the
 * asserted values rely on that wrap; confirm target uses 8-bit two's
 * complement char. */
24 cL = -128; cR = -1; ur8 = cL * cR; ur8b = cR * cL; ASSERT(ur8 == 128); ASSERT(ur8b == 128);
25 cL = 128; cR = -1; r8 = cL * cR; r8b = cR * cL; ASSERT( r8 == -128); ASSERT( r8b == -128);
26 cL = -128; cR = 1; r8 = cL * cR; r8b = cR * cL; ASSERT( r8 == -128); ASSERT( r8b == -128);
27 cL = 128; cR = 1; ur8 = cL * cR; ur8b = cR * cL; ASSERT(ur8 == 128); ASSERT(ur8b == 128);
/* Mixed unsigned char * signed char: both still promote to int (int can
 * hold all unsigned char values), so no unsigned wraparound occurs and
 * signed products like 128 * -5 = -640 are exact. */
30 ucL = 128; cR = -5; r16 = ucL * cR; r16b = cR * ucL; ASSERT(r16 == -640); ASSERT(r16b == -640);
31 ucL = 128; cR = 5; r16 = ucL * cR; r16b = cR * ucL; ASSERT(r16 == 640); ASSERT(r16b == 640);
34 ucL = 127; cR = -1; r8 = ucL * cR; r8b = cR * ucL; ASSERT( r8 == -127); ASSERT( r8b == -127);
35 ucL = 128; cR = 1; ur8 = ucL * cR; ur8b = cR * ucL; ASSERT(ur8 == 128); ASSERT(ur8b == 128);
/* unsigned char * unsigned char. */
36 ucL = 128; ucR = 5; r16 = ucL * ucR; r16b = ucR * ucL; ASSERT(r16 == 640); ASSERT(r16b == 640);
37 ucL = 128; ucR = 1; ur8 = ucL * ucR; ur8b = ucR * ucL; ASSERT(ur8 == 128); ASSERT(ur8b == 128);
/* NOTE(review): fragment of the one-byte DIVIDE test. The enclosing
 * function header and the declarations of cL/cR/r8/r16 are outside this
 * chunk (cL/cR presumably signed char, r8 char, r16 short -- confirm).
 * All expected values follow C's truncate-toward-zero division. */
44 {attrL} unsigned char ucL;
46 {attrR} unsigned char ucR;
48 volatile unsigned char ur8;
/* -128 / -2 = 64: exact after promotion to int; stored both 8- and 16-bit. */
51 cL = -128; cR = -2; r8 = cL / cR; ASSERT(r8 == 64);
52 cL = -128; cR = -2; r16 = cL / cR; ASSERT(r16 == 64);
/* unsigned / unsigned, including divisor > dividend (quotient 0). */
54 ucL = 255; ucR = 3; r8 = ucL / ucR; ASSERT(r8 == 85);
55 ucL = 255; ucR = 255; r8 = ucL / ucR; ASSERT(r8 == 1);
56 ucL = 3; ucR = 255; r8 = ucL / ucR; ASSERT(r8 == 0);
/* signed / signed: all four sign combinations, truncation toward zero
 * (127/3 = 42, not 43; -127/3 = -42, not -43). */
58 cL = 127; cR = 3; r8 = cL / cR; ASSERT(r8 == 42);
59 cL = -127; cR = 3; r8 = cL / cR; ASSERT(r8 == -42);
60 cL = 127; cR = -3; r8 = cL / cR; ASSERT(r8 == -42);
61 cL = -127; cR = -3; r8 = cL / cR; ASSERT(r8 == 42);
/* unsigned char / signed char: both promote to int, so the division is
 * signed -- 255 / -3 is genuinely -85, not an unsigned wraparound. */
63 ucL = 127; cR = 3; r8 = ucL / cR; ASSERT(r8 == 42);
64 ucL = 255; cR = 3; r8 = ucL / cR; ASSERT(r8 == 85);
66 ucL = 127; cR = -3; r8 = ucL / cR; ASSERT(r8 == -42);
67 ucL = 255; cR = -3; r8 = ucL / cR; ASSERT(r8 == -85);
/* signed char / unsigned char: again signed int division after promotion;
 * |dividend| < divisor yields 0 even for negative dividends. */
70 cL = 127; ucR = 3; r8 = cL / ucR; ASSERT(r8 == 42);
71 cL = -127; ucR = 3; r8 = cL / ucR; ASSERT(r8 == -42);
72 cL = 127; ucR = 128; r8 = cL / ucR; ASSERT(r8 == 0);
73 cL = -127; ucR = 128; r8 = cL / ucR; ASSERT(r8 == 0);
/* Division by 1: identity, exercised for possible strength-reduction paths. */
75 cL = 127; cR = 1; r8 = cL / cR; ASSERT(r8 == 127);
76 cL = 127; cR = 1; r16 = cL / cR; ASSERT(r16 == 127);
78 ucL = 251; cR = 1; ur8 = ucL / cR; ASSERT(ur8 == 251);
79 ucL = 251; cR = 1; r16 = ucL / cR; ASSERT(r16 == 251);
/* 253 / -3 = -84 (truncation toward zero); also checked widened to 16-bit. */
82 ucL = 253; cR = -3; r8 = ucL / cR; ASSERT(r8 == -84);
83 ucL = 253; cR = -3; r16 = ucL / cR; ASSERT(r16 == -84);
85 ucL = 254; cR = -1; r16 = ucL / cR; ASSERT(r16 == -254);
/* -128 / -1 = 128: would overflow signed char, but the operands promote
 * to int first and the result is stored in a 16-bit variable, so it is
 * well-defined here. */
87 cL = -128; cR = -1; r16 = cL / cR; ASSERT(r16 == 128);
/* NOTE(review): fragment of the one-byte MODULO test. The enclosing
 * function header and the declarations of cL/cR/r8/r16 are outside this
 * chunk -- confirm against the full template. Expected values follow
 * C99 semantics: a%b has the sign of the dividend a, and (a/b)*b + a%b == a. */
94 {attrL} unsigned char ucL;
96 {attrR} unsigned char ucR;
98 volatile unsigned char ur8;
/* Positive dividend 128: remainder is +3 regardless of the divisor's sign
 * (128 % 5 == 128 % -5 == 3). Mixed types promote to int, so no unsigned
 * wraparound is involved. */
101 ucL = 128; cR = 5; r16 = ucL % cR; ASSERT(r16 == 3);
103 ucL = 128; cR = -5; r16 = ucL % cR; ASSERT(r16 == 3);
105 ucL = 128; ucR = 5; r16 = ucL % ucR; ASSERT(r16 == 3);
/* Divisor larger than dividend: remainder is the dividend itself. */
107 ucL = 128; ucR = 255; ur8 = ucL % ucR; ASSERT(ur8 == 128);
108 ucL = 128; ucR = 255; r16 = ucL % ucR; ASSERT(r16 == 128);
110 ucL = 128; cR = 127; r8 = ucL % cR; ASSERT(r8 == 1);
/* Positive signed dividend: 127 % +/-5 == 2; sign of divisor irrelevant. */
112 cL = 127; cR = 5; r16 = cL % cR; ASSERT(r16 == 2);
114 cL = 127; cR = -5; r16 = cL % cR; ASSERT(r16 == 2);
116 cL = 127; ucR = 5; r16 = cL % ucR; ASSERT(r16 == 2);
/* Negative dividend: -128 % +/-5 == -3 (remainder takes the dividend's
 * sign under truncate-toward-zero division). */
118 cL = -128; cR = 5; r16 = cL % cR; ASSERT(r16 == -3);
120 cL = -128; cR = -5; r16 = cL % cR; ASSERT(r16 == -3);
122 cL = -128; ucR = 5; r16 = cL % ucR; ASSERT(r16 == -3);