| Rev | Author | Line No. | Line |
|---|---|---|---|
| 212 | vana | 1 | # |
| | | 2 | # Copyright (C) 2005 Jakub Vana |
| 478 | jermar | 3 | # Copyright (C) 2005 Jakub Jermar |
| 212 | vana | 4 | # All rights reserved. |
| | | 5 | # |
| | | 6 | # Redistribution and use in source and binary forms, with or without |
| | | 7 | # modification, are permitted provided that the following conditions |
| | | 8 | # are met: |
| | | 9 | # |
| | | 10 | # - Redistributions of source code must retain the above copyright |
| | | 11 | # notice, this list of conditions and the following disclaimer. |
| | | 12 | # - Redistributions in binary form must reproduce the above copyright |
| | | 13 | # notice, this list of conditions and the following disclaimer in the |
| | | 14 | # documentation and/or other materials provided with the distribution. |
| | | 15 | # - The name of the author may not be used to endorse or promote products |
| | | 16 | # derived from this software without specific prior written permission. |
| | | 17 | # |
| | | 18 | # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
| | | 19 | # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
| | | 20 | # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. |
| | | 21 | # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, |
| | | 22 | # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT |
| | | 23 | # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| | | 24 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| | | 25 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| | | 26 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
| | | 27 | # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| | | 28 | # |
| | | 29 | |
| 443 | jermar | 30 | #include <arch/stack.h> |
| 478 | jermar | 31 | #include <arch/register.h> |
| 212 | vana | 32 | |
| 443 | jermar | 33 | #define STACK_ITEMS 12 |
| | | 34 | #define STACK_FRAME_SIZE ((STACK_ITEMS*STACK_ITEM_SIZE) + STACK_SCRATCH_AREA_SIZE) |
| | | 35 | |
| | | 36 | #if (STACK_FRAME_SIZE % STACK_ALIGNMENT != 0) |
| | | 37 | #error Memory stack must be 16-byte aligned. |
| | | 38 | #endif |
| | | 39 | |
| 438 | jermar | 40 | /** Heavyweight interrupt handler |
| | | 41 | * |
| 435 | jermar | 42 | * This macro roughly follows steps 1 to 19 described in |
| | | 43 | * Intel Itanium Architecture Software Developer's Manual, Chapter 3.4.2. |
| | | 44 | * |
| 438 | jermar | 45 | * The HEAVYWEIGHT_HANDLER macro must fit into 16 bundles (48 instructions). |
| | | 46 | * This is achieved by using procedure calls once the RSE becomes operational. |
| | | 47 | * |
| 435 | jermar | 48 | * Some steps are skipped (enabling and disabling interrupts). |
| | | 49 | * Some steps are not fully supported yet (e.g. interruptions |
| 438 | jermar | 50 | * from userspace and floating-point context). |
| 456 | jermar | 51 | * |
| | | 52 | * @param offs Offset from the beginning of the IVT. |
| | | 53 | * @param handler Interrupt handler address. |
| 435 | jermar | 54 | */ |
| 470 | jermar | 55 | .macro HEAVYWEIGHT_HANDLER offs, handler=universal_handler |
| | | 56 | .org ivt + \offs |
| | | 57 | mov r24 = \offs |
| | | 58 | movl r25 = \handler ;; |
| | | 59 | mov ar.k0 = r24 |
| | | 60 | mov ar.k1 = r25 |
| | | 61 | br heavyweight_handler |
| | | 62 | .endm |
| 212 | vana | 63 | |
| 470 | jermar | 64 | .global heavyweight_handler |
| | | 65 | heavyweight_handler: |
| 435 | jermar | 66 | /* 1. copy interrupt registers into bank 0 */ |
| | | 67 | mov r24 = cr.iip |
| | | 68 | mov r25 = cr.ipsr |
| | | 69 | mov r26 = cr.iipa |
| | | 70 | mov r27 = cr.isr |
| | | 71 | mov r28 = cr.ifa |
| | | 72 | |
| | | 73 | /* 2. preserve predicate register into bank 0 */ |
| | | 74 | mov r29 = pr ;; |
| | | 75 | |
| 438 | jermar | 76 | /* 3. switch to kernel memory stack */ |
| 435 | jermar | 77 | /* TODO: support interruptions from userspace */ |
| | | 78 | /* assume kernel stack */ |
| | | 79 | |
| 443 | jermar | 80 | add r31 = -8, r12 ;; |
| 470 | jermar | 81 | add r12 = -STACK_FRAME_SIZE, r12 |
| | | 82 | |
| | | 83 | /* 4. save registers in bank 0 into memory stack */ |
| 443 | jermar | 84 | st8 [r31] = r29, -8 ;; /* save predicate registers */ |
| 438 | jermar | 85 | |
| 443 | jermar | 86 | st8 [r31] = r24, -8 ;; /* save cr.iip */ |
| | | 87 | st8 [r31] = r25, -8 ;; /* save cr.ipsr */ |
| | | 88 | st8 [r31] = r26, -8 ;; /* save cr.iipa */ |
| | | 89 | st8 [r31] = r27, -8 ;; /* save cr.isr */ |
| 470 | jermar | 90 | st8 [r31] = r28, -8 /* save cr.ifa */ |
| 438 | jermar | 91 | |
| | | 92 | /* 5. RSE switch from interrupted context */ |
| 435 | jermar | 93 | mov r24 = ar.rsc |
| | | 94 | mov r25 = ar.pfs |
| | | 95 | cover |
| | | 96 | mov r26 = cr.ifs |
| | | 97 | |
| 470 | jermar | 98 | st8 [r31] = r24, -8 ;; /* save ar.rsc */ |
| | | 99 | st8 [r31] = r25, -8 ;; /* save ar.pfs */ |
| 443 | jermar | 100 | st8 [r31] = r26, -8 /* save cr.ifs */ |
| 435 | jermar | 101 | |
| 470 | jermar | 102 | and r30 = ~3, r24 ;; |
| | | 103 | mov ar.rsc = r30 ;; /* place RSE in enforced lazy mode */ |
| 435 | jermar | 104 | |
| | | 105 | mov r27 = ar.rnat |
| 470 | jermar | 106 | mov r28 = ar.bspstore ;; |
| 435 | jermar | 107 | |
| | | 108 | /* assume kernel backing store */ |
| 478 | jermar | 109 | /* mov ar.bspstore = r28 ;; */ |
| 435 | jermar | 110 | |
| | | 111 | mov r29 = ar.bsp |
| | | 112 | |
| 470 | jermar | 113 | st8 [r31] = r27, -8 ;; /* save ar.rnat */ |
| | | 114 | st8 [r31] = r28, -8 ;; /* save ar.bspstore */ |
| 456 | jermar | 115 | st8 [r31] = r29, -8 /* save ar.bsp */ |
| 435 | jermar | 116 | |
| | | 117 | mov ar.rsc = r24 /* restore RSE's setting */ |
| | | 118 | |
| 470 | jermar | 119 | /* steps 6 - 15 are done by heavyweight_handler_inner() */ |
| | | 120 | mov r24 = b0 /* save b0 belonging to interrupted context */ |
| | | 121 | mov r26 = ar.k0 |
| | | 122 | mov r25 = ar.k1 |
| 438 | jermar | 123 | br.call.sptk.many rp = heavyweight_handler_inner |
| 470 | jermar | 124 | 0: mov b0 = r24 /* restore b0 belonging to the interrupted context */ |
| 438 | jermar | 125 | |
| 470 | jermar | 126 | /* 16. RSE switch to interrupted context */ |
| | | 127 | cover /* allocate zero-size frame (step 1 in the Intel docs) */ |
| 438 | jermar | 128 | |
| 470 | jermar | 129 | add r31 = STACK_SCRATCH_AREA_SIZE, r12 ;; |
| | | 130 | |
| | | 131 | mov r28 = ar.bspstore /* calculate loadrs (step 2) */ |
| | | 132 | ld8 r29 = [r31], +8 ;; /* load ar.bsp */ |
| | | 133 | sub r27 = r29, r28 ;; |
| | | 134 | shl r27 = r27, 16 |
| | | 135 | |
| | | 136 | mov r24 = ar.rsc ;; |
| | | 137 | and r30 = ~3, r24 ;; |
| | | 138 | or r24 = r30, r27 ;; |
| | | 139 | mov ar.rsc = r24 ;; /* place RSE in enforced lazy mode */ |
| | | 140 | |
| | | 141 | loadrs /* (step 3) */ |
| | | 142 | |
| | | 143 | ld8 r28 = [r31], +8 ;; /* load ar.bspstore */ |
| | | 144 | ld8 r27 = [r31], +8 ;; /* load ar.rnat */ |
| | | 145 | ld8 r26 = [r31], +8 ;; /* load cr.ifs */ |
| | | 146 | ld8 r25 = [r31], +8 ;; /* load ar.pfs */ |
| | | 147 | ld8 r24 = [r31], +8 ;; /* load ar.rsc */ |
| | | 148 | |
| 478 | jermar | 149 | /* mov ar.bspstore = r28 ;; */ /* (step 4) */ |
| | | 150 | /* mov ar.rnat = r27 */ /* (step 5) */ |
| 470 | jermar | 151 | |
| | | 152 | mov ar.pfs = r25 /* (step 6) */ |
| | | 153 | mov cr.ifs = r26 |
| | | 154 | |
| | | 155 | mov ar.rsc = r24 /* (step 7) */ |
| | | 156 | |
| | | 157 | /* 17. restore interruption state from memory stack */ |
| | | 158 | ld8 r28 = [r31], +8 ;; /* load cr.ifa */ |
| | | 159 | ld8 r27 = [r31], +8 ;; /* load cr.isr */ |
| | | 160 | ld8 r26 = [r31], +8 ;; /* load cr.iipa */ |
| | | 161 | ld8 r25 = [r31], +8 ;; /* load cr.ipsr */ |
| | | 162 | ld8 r24 = [r31], +8 ;; /* load cr.iip */ |
| | | 163 | |
| | | 164 | mov cr.iip = r24 |
| | | 165 | mov cr.ipsr = r25 |
| | | 166 | mov cr.iipa = r26 |
| | | 167 | mov cr.isr = r27 |
| | | 168 | mov cr.ifa = r28 |
| | | 169 | |
| | | 170 | /* 18. restore predicate registers from memory stack */ |
| | | 171 | ld8 r29 = [r31], -8 ;; /* load predicate registers */ |
| | | 172 | mov pr = r29 |
| | | 173 | |
| | | 174 | /* 19. return from interruption */ |
| | | 175 | add r12 = STACK_FRAME_SIZE, r12 |
| | | 176 | rfi ;; |
| | | 177 | |
| 438 | jermar | 178 | .global heavyweight_handler_inner |
| | | 179 | heavyweight_handler_inner: |
| | | 180 | /* |
| | | 181 | * From this point, the rest of the interrupted context |
| | | 182 | * will be preserved in stacked registers and backing store. |
| | | 183 | */ |
| 470 | jermar | 184 | alloc loc0 = ar.pfs, 0, 47, 2, 0 ;; |
| 438 | jermar | 185 | |
| 470 | jermar | 186 | /* bank 0 is going to be shadowed, copy essential data from there */ |
| | | 187 | mov loc1 = r24 /* b0 belonging to interrupted context */ |
| | | 188 | mov loc2 = r25 |
| | | 189 | mov out0 = r26 |
| | | 190 | |
| | | 191 | add out1 = STACK_SCRATCH_AREA_SIZE, r12 |
| 438 | jermar | 192 | |
| 435 | jermar | 193 | /* 6. switch to bank 1 and reenable PSR.ic */ |
| 478 | jermar | 194 | ssm PSR_IC_MASK |
| 435 | jermar | 195 | bsw.1 ;; |
| | | 196 | srlz.d |
| | | 197 | |
| | | 198 | /* 7. preserve branch and application registers */ |
| 470 | jermar | 199 | mov loc3 = ar.unat |
| | | 200 | mov loc4 = ar.lc |
| | | 201 | mov loc5 = ar.ec |
| | | 202 | mov loc6 = ar.ccv |
| | | 203 | mov loc7 = ar.csd |
| | | 204 | mov loc8 = ar.ssd |
| 435 | jermar | 205 | |
| 470 | jermar | 206 | mov loc9 = b0 |
| | | 207 | mov loc10 = b1 |
| | | 208 | mov loc11 = b2 |
| | | 209 | mov loc12 = b3 |
| | | 210 | mov loc13 = b4 |
| | | 211 | mov loc14 = b5 |
| | | 212 | mov loc15 = b6 |
| | | 213 | mov loc16 = b7 |
| 438 | jermar | 214 | |
| 435 | jermar | 215 | /* 8. preserve general and floating-point registers */ |
| | | 216 | /* TODO: save floating-point context */ |
| 470 | jermar | 217 | mov loc17 = r1 |
| | | 218 | mov loc18 = r2 |
| | | 219 | mov loc19 = r3 |
| | | 220 | mov loc20 = r4 |
| | | 221 | mov loc21 = r5 |
| | | 222 | mov loc22 = r6 |
| | | 223 | mov loc23 = r7 |
| | | 224 | mov loc24 = r8 |
| | | 225 | mov loc25 = r9 |
| | | 226 | mov loc26 = r10 |
| | | 227 | mov loc27 = r11 |
| 438 | jermar | 228 | /* skip r12 (stack pointer) */ |
| 470 | jermar | 229 | mov loc28 = r13 |
| | | 230 | mov loc29 = r14 |
| | | 231 | mov loc30 = r15 |
| | | 232 | mov loc31 = r16 |
| | | 233 | mov loc32 = r17 |
| | | 234 | mov loc33 = r18 |
| | | 235 | mov loc34 = r19 |
| | | 236 | mov loc35 = r20 |
| | | 237 | mov loc36 = r21 |
| | | 238 | mov loc37 = r22 |
| | | 239 | mov loc38 = r23 |
| | | 240 | mov loc39 = r24 |
| | | 241 | mov loc40 = r25 |
| | | 242 | mov loc41 = r26 |
| | | 243 | mov loc42 = r27 |
| | | 244 | mov loc43 = r28 |
| | | 245 | mov loc44 = r29 |
| | | 246 | mov loc45 = r30 |
| | | 247 | mov loc46 = r31 |
| 438 | jermar | 248 | |
| 435 | jermar | 249 | /* 9. skipped (will not enable interrupts) */ |
| 478 | jermar | 250 | /* |
| | | 251 | * ssm PSR_I_MASK |
| | | 252 | * ;; |
| | | 253 | * srlz.d |
| | | 254 | */ |
| 238 | vana | 255 | |
| 438 | jermar | 256 | /* 10. call handler */ |
| 470 | jermar | 257 | mov b1 = loc2 |
| 438 | jermar | 258 | br.call.sptk.many b0 = b1 |
| | | 259 | |
| | | 260 | /* 11. return from handler */ |
| | | 261 | 0: |
| | | 262 | |
| 435 | jermar | 263 | /* 12. skipped (will not disable interrupts) */ |
| 478 | jermar | 264 | /* |
| | | 265 | * rsm PSR_I_MASK |
| | | 266 | * ;; |
| | | 267 | * srlz.d |
| | | 268 | */ |
| 438 | jermar | 269 | |
| 435 | jermar | 270 | /* 13. restore general and floating-point registers */ |
| | | 271 | /* TODO: restore floating-point context */ |
| 470 | jermar | 272 | mov r1 = loc17 |
| | | 273 | mov r2 = loc18 |
| | | 274 | mov r3 = loc19 |
| | | 275 | mov r4 = loc20 |
| | | 276 | mov r5 = loc21 |
| | | 277 | mov r6 = loc22 |
| | | 278 | mov r7 = loc23 |
| | | 279 | mov r8 = loc24 |
| | | 280 | mov r9 = loc25 |
| | | 281 | mov r10 = loc26 |
| | | 282 | mov r11 = loc27 |
| 438 | jermar | 283 | /* skip r12 (stack pointer) */ |
| 470 | jermar | 284 | mov r13 = loc28 |
| | | 285 | mov r14 = loc29 |
| | | 286 | mov r15 = loc30 |
| | | 287 | mov r16 = loc31 |
| | | 288 | mov r17 = loc32 |
| | | 289 | mov r18 = loc33 |
| | | 290 | mov r19 = loc34 |
| | | 291 | mov r20 = loc35 |
| | | 292 | mov r21 = loc36 |
| | | 293 | mov r22 = loc37 |
| | | 294 | mov r23 = loc38 |
| | | 295 | mov r24 = loc39 |
| | | 296 | mov r25 = loc40 |
| | | 297 | mov r26 = loc41 |
| | | 298 | mov r27 = loc42 |
| | | 299 | mov r28 = loc43 |
| | | 300 | mov r29 = loc44 |
| | | 301 | mov r30 = loc45 |
| | | 302 | mov r31 = loc46 |
| 435 | jermar | 303 | |
| | | 304 | /* 14. restore branch and application registers */ |
| 470 | jermar | 305 | mov ar.unat = loc3 |
| | | 306 | mov ar.lc = loc4 |
| | | 307 | mov ar.ec = loc5 |
| | | 308 | mov ar.ccv = loc6 |
| | | 309 | mov ar.csd = loc7 |
| | | 310 | mov ar.ssd = loc8 |
| 435 | jermar | 311 | |
| 470 | jermar | 312 | mov b0 = loc9 |
| | | 313 | mov b1 = loc10 |
| | | 314 | mov b2 = loc11 |
| | | 315 | mov b3 = loc12 |
| | | 316 | mov b4 = loc13 |
| | | 317 | mov b5 = loc14 |
| | | 318 | mov b6 = loc15 |
| | | 319 | mov b7 = loc16 |
| 438 | jermar | 320 | |
| 435 | jermar | 321 | /* 15. disable PSR.ic and switch to bank 0 */ |
| 478 | jermar | 322 | rsm PSR_IC_MASK |
| 435 | jermar | 323 | bsw.0 ;; |
| | | 324 | srlz.d |
| 438 | jermar | 325 | |
| 470 | jermar | 326 | mov r24 = loc1 |
| 438 | jermar | 327 | mov ar.pfs = loc0 |
| 470 | jermar | 328 | br.ret.sptk.many b0 |
| 438 | jermar | 329 | |
| 470 | jermar | 330 | .global ivt |
| | | 331 | .align 32768 |
| | | 332 | ivt: |
| | | 333 | HEAVYWEIGHT_HANDLER 0x0000 |
| | | 334 | HEAVYWEIGHT_HANDLER 0x0400 |
| | | 335 | HEAVYWEIGHT_HANDLER 0x0800 |
| | | 336 | HEAVYWEIGHT_HANDLER 0x0c00 |
| | | 337 | HEAVYWEIGHT_HANDLER 0x1000 |
| | | 338 | HEAVYWEIGHT_HANDLER 0x1400 |
| | | 339 | HEAVYWEIGHT_HANDLER 0x1800 |
| | | 340 | HEAVYWEIGHT_HANDLER 0x1c00 |
| | | 341 | HEAVYWEIGHT_HANDLER 0x2000 |
| | | 342 | HEAVYWEIGHT_HANDLER 0x2400 |
| | | 343 | HEAVYWEIGHT_HANDLER 0x2800 |
| | | 344 | HEAVYWEIGHT_HANDLER 0x2c00 break_instruction |
| | | 345 | HEAVYWEIGHT_HANDLER 0x3000 external_interrupt /* For external interrupts, the heavyweight handler is used. */ |
| | | 346 | HEAVYWEIGHT_HANDLER 0x3400 |
| | | 347 | HEAVYWEIGHT_HANDLER 0x3800 |
| | | 348 | HEAVYWEIGHT_HANDLER 0x3c00 |
| | | 349 | HEAVYWEIGHT_HANDLER 0x4000 |
| | | 350 | HEAVYWEIGHT_HANDLER 0x4400 |
| | | 351 | HEAVYWEIGHT_HANDLER 0x4800 |
| | | 352 | HEAVYWEIGHT_HANDLER 0x4c00 |
| 444 | vana | 353 | |
| 470 | jermar | 354 | HEAVYWEIGHT_HANDLER 0x5000 |
| | | 355 | HEAVYWEIGHT_HANDLER 0x5100 |
| | | 356 | HEAVYWEIGHT_HANDLER 0x5200 |
| | | 357 | HEAVYWEIGHT_HANDLER 0x5300 |
| | | 358 | HEAVYWEIGHT_HANDLER 0x5400 general_exception |
| | | 359 | HEAVYWEIGHT_HANDLER 0x5500 |
| | | 360 | HEAVYWEIGHT_HANDLER 0x5600 |
| | | 361 | HEAVYWEIGHT_HANDLER 0x5700 |
| | | 362 | HEAVYWEIGHT_HANDLER 0x5800 |
| | | 363 | HEAVYWEIGHT_HANDLER 0x5900 |
| | | 364 | HEAVYWEIGHT_HANDLER 0x5a00 |
| | | 365 | HEAVYWEIGHT_HANDLER 0x5b00 |
| | | 366 | HEAVYWEIGHT_HANDLER 0x5c00 |
| | | 367 | HEAVYWEIGHT_HANDLER 0x5d00 |
| | | 368 | HEAVYWEIGHT_HANDLER 0x5e00 |
| | | 369 | HEAVYWEIGHT_HANDLER 0x5f00 |
| 435 | jermar | 370 | |
| 470 | jermar | 371 | HEAVYWEIGHT_HANDLER 0x6000 |
| | | 372 | HEAVYWEIGHT_HANDLER 0x6100 |
| | | 373 | HEAVYWEIGHT_HANDLER 0x6200 |
| | | 374 | HEAVYWEIGHT_HANDLER 0x6300 |
| | | 375 | HEAVYWEIGHT_HANDLER 0x6400 |
| | | 376 | HEAVYWEIGHT_HANDLER 0x6500 |
| | | 377 | HEAVYWEIGHT_HANDLER 0x6600 |
| | | 378 | HEAVYWEIGHT_HANDLER 0x6700 |
| | | 379 | HEAVYWEIGHT_HANDLER 0x6800 |
| | | 380 | HEAVYWEIGHT_HANDLER 0x6900 |
| | | 381 | HEAVYWEIGHT_HANDLER 0x6a00 |
| | | 382 | HEAVYWEIGHT_HANDLER 0x6b00 |
| | | 383 | HEAVYWEIGHT_HANDLER 0x6c00 |
| | | 384 | HEAVYWEIGHT_HANDLER 0x6d00 |
| | | 385 | HEAVYWEIGHT_HANDLER 0x6e00 |
| | | 386 | HEAVYWEIGHT_HANDLER 0x6f00 |
| 435 | jermar | 387 | |
| 470 | jermar | 388 | HEAVYWEIGHT_HANDLER 0x7000 |
| | | 389 | HEAVYWEIGHT_HANDLER 0x7100 |
| | | 390 | HEAVYWEIGHT_HANDLER 0x7200 |
| | | 391 | HEAVYWEIGHT_HANDLER 0x7300 |
| | | 392 | HEAVYWEIGHT_HANDLER 0x7400 |
| | | 393 | HEAVYWEIGHT_HANDLER 0x7500 |
| | | 394 | HEAVYWEIGHT_HANDLER 0x7600 |
| | | 395 | HEAVYWEIGHT_HANDLER 0x7700 |
| | | 396 | HEAVYWEIGHT_HANDLER 0x7800 |
| | | 397 | HEAVYWEIGHT_HANDLER 0x7900 |
| | | 398 | HEAVYWEIGHT_HANDLER 0x7a00 |
| | | 399 | HEAVYWEIGHT_HANDLER 0x7b00 |
| | | 400 | HEAVYWEIGHT_HANDLER 0x7c00 |
| | | 401 | HEAVYWEIGHT_HANDLER 0x7d00 |
| | | 402 | HEAVYWEIGHT_HANDLER 0x7e00 |
| | | 403 | HEAVYWEIGHT_HANDLER 0x7f00 |
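
Every IVT entry in the listing above is produced by the HEAVYWEIGHT_HANDLER macro (lines 55-62 of the file). As a rough illustration, the entry for the Break Instruction vector at offset 0x2c00 should expand to something like the following; this is written out by hand from the macro body, not taken from assembler output:

```asm
	.org ivt + 0x2c00		/* fixed slot of the Break Instruction vector */
	mov r24 = 0x2c00		/* remember the vector offset ... */
	movl r25 = break_instruction ;;	/* ... and the handler address */
	mov ar.k0 = r24			/* pass both to the common code via kernel registers */
	mov ar.k1 = r25
	br heavyweight_handler		/* continue in the shared heavyweight path */
```

Entries listed without an explicit handler fall back to the macro's default parameter, universal_handler.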
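The alignment guard at lines 33-38 can be checked by hand. The concrete values of STACK_ITEM_SIZE, STACK_SCRATCH_AREA_SIZE and STACK_ALIGNMENT come from <arch/stack.h> and are not visible in this file; assuming the usual ia64 values of 8, 16 and 16 bytes, the arithmetic works out as follows:

```asm
/* Assuming STACK_ITEM_SIZE == 8, STACK_SCRATCH_AREA_SIZE == 16 and
 * STACK_ALIGNMENT == 16 (values taken to be those in <arch/stack.h>):
 *
 *   STACK_FRAME_SIZE = (12 * 8) + 16 = 112
 *   112 % 16 == 0   ->  the #error on line 37 does not trigger
 *
 * The 12 STACK_ITEMS correspond to the 12 values stored through r31 in
 * steps 4 and 5: pr, cr.iip, cr.ipsr, cr.iipa, cr.isr, cr.ifa, ar.rsc,
 * ar.pfs, cr.ifs, ar.rnat, ar.bspstore and ar.bsp.
 */
```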
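The densest part of the return path is the loadrs setup in step 16 (lines 131-141). The same instructions are repeated below with explanatory comments added; the reading of the RSC.loadrs field (a byte count held in bits 16-29 of ar.rsc) follows the Itanium architecture definition and is not spelled out in the file itself:

```asm
	mov r28 = ar.bspstore		/* current kernel backing store pointer */
	ld8 r29 = [r31], +8 ;;		/* ar.bsp value saved in step 5, after cover */
	sub r27 = r29, r28 ;;		/* bytes between the current bspstore and the saved bsp */
	shl r27 = r27, 16		/* position the byte count in RSC.loadrs (bits 16-29) */

	mov r24 = ar.rsc ;;
	and r30 = ~3, r24 ;;		/* clear RSC.mode (enforced lazy) */
	or r24 = r30, r27 ;;		/* merge the new loadrs value into RSC */
	mov ar.rsc = r24 ;;

	loadrs				/* reload that portion of the frame from the backing store */
```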