fp-impl.c.inc
/*
 * translate-fp.c
 *
 * Standard FPU translation
 */

static inline void gen_reset_fpstatus(void)
{
    gen_helper_reset_fpstatus(cpu_env);
}

static inline void gen_compute_fprf_float64(TCGv_i64 arg)
{
    gen_helper_compute_fprf_float64(cpu_env, arg);
    gen_helper_float_check_status(cpu_env);
}

#if defined(TARGET_PPC64)
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(tmp, cpu_fpscr);
    tcg_gen_shri_i32(cpu_crf[1], tmp, 28);
    tcg_temp_free_i32(tmp);
}
#else
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    tcg_gen_shri_tl(cpu_crf[1], cpu_fpscr, 28);
}
#endif

/*** Floating-Point arithmetic ***/
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    TCGv_i64 t3; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    t3 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    get_fpr(t2, rB(ctx->opcode)); \
    gen_helper_f##op(t3, cpu_env, t0, t1, t2); \
    if (isfloat) { \
        gen_helper_frsp(t3, cpu_env, t3); \
    } \
    set_fpr(rD(ctx->opcode), t3); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t3); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
    tcg_temp_free_i64(t3); \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);

#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rB(ctx->opcode)); \
    gen_helper_f##op(t2, cpu_env, t0, t1); \
    if (isfloat) { \
        gen_helper_frsp(t2, cpu_env, t2); \
    } \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    gen_helper_f##op(t2, cpu_env, t0, t1); \
    if (isfloat) { \
        gen_helper_frsp(t2, cpu_env, t2); \
    } \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
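/*
 * Note: the _GEN_FLOAT_ACB/_GEN_FLOAT_AB/_GEN_FLOAT_AC macros above, and
 * GEN_FLOAT_B/GEN_FLOAT_BS below, all expand to gen_f<name>() translators
 * of the same shape: raise POWERPC_EXCP_FPU if the FPU is disabled, reset
 * the FP status, load the source FPRs, call the arithmetic helper, round
 * the result through gen_helper_frsp() for the single-precision variants,
 * store the result, optionally update FPSCR[FPRF] and check for deferred
 * exceptions, and copy the FX/FEX/VX/OX nibble of the FPSCR into CR1 for
 * the Rc=1 (record) forms.
 */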

#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t1); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}

#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t1); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}

/* fadd - fadds */
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

/* fre */
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

/* fres */
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

/* frsqrte */
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);

/* frsqrtes */
static void gen_frsqrtes(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_frsqrte(t1, cpu_env, t0);
    gen_helper_frsp(t1, cpu_env, t1);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fsel */
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
/* fsub - fsubs */
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* Optional: */

/* fsqrt */
static void gen_fsqrt(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_fsqrt(t1, cpu_env, t0);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_fsqrts(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_fsqrt(t1, cpu_env, t0);
    gen_helper_frsp(t1, cpu_env, t1);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
/* fnmadd - fnmadds */
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
/* fnmsub - fnmsubs */
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);

/*** Floating-Point round & convert ***/
/* fctiw */
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwu */
GEN_FLOAT_B(ctiwu, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206);
/* fctiwz */
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* fctiwuz */
GEN_FLOAT_B(ctiwuz, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206);
/* frsp */
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
/* fcfid */
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64);
/* fcfids */
GEN_FLOAT_B(cfids, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206);
/* fcfidu */
GEN_FLOAT_B(cfidu, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fcfidus */
GEN_FLOAT_B(cfidus, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fctid */
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC2_FP_CVT_S64);
/* fctidu */
GEN_FLOAT_B(ctidu, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206);
/* fctidz */
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC2_FP_CVT_S64);
/* fctiduz */
GEN_FLOAT_B(ctiduz, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206);

/* frin */
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
/* friz */
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
/* frip */
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
/* frim */
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);

static void gen_ftdiv(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_ftdiv(cpu_crf[crfD(ctx->opcode)], t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_ftsqrt(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_ftsqrt(cpu_crf[crfD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

/*** Floating-Point compare ***/

/* fcmpo */
static void gen_fcmpo(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpo(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fcmpu */
static void gen_fcmpu(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpu(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/*** Floating-point move ***/
/* fabs */
/* XXX: beware that fabs never checks for NaNs nor update FPSCR */
static void gen_fabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_andi_i64(t1, t0, ~(1ULL << 63));
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fmr - fmr. */
/* XXX: beware that fmr never checks for NaNs nor update FPSCR */
static void gen_fmr(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    set_fpr(rD(ctx->opcode), t0);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
}

/* fnabs */
/* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
static void gen_fnabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_ori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fneg */
/* XXX: beware that fneg never checks for NaNs nor update FPSCR */
static void gen_fneg(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_xori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fcpsgn: PowerPC 2.05 specification */
/* XXX: beware that fcpsgn never checks for NaNs nor update FPSCR */
static void gen_fcpsgn(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 0, 63);
    set_fpr(rD(ctx->opcode), t2);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

static void gen_fmrgew(DisasContext *ctx)
{
    TCGv_i64 b0;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    b0 = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_shri_i64(b0, t0, 32);
    get_fpr(t0, rA(ctx->opcode));
    tcg_gen_deposit_i64(t1, t0, b0, 0, 32);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free_i64(b0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_fmrgow(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    get_fpr(t1, rA(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 32, 32);
    set_fpr(rD(ctx->opcode), t2);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

/*** Floating-Point status & ctrl register ***/

/* mcrfs */
static void gen_mcrfs(DisasContext *ctx)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmask;
    TCGv_i64 tnew_fpscr = tcg_temp_new_i64();
    int bfa;
    int nibble;
    int shift;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bfa = crfS(ctx->opcode);
    nibble = 7 - bfa;
    shift = 4 * nibble;
    tcg_gen_shri_tl(tmp, cpu_fpscr, shift);
    tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],
                     0xf);
    tcg_temp_free(tmp);
    tcg_gen_extu_tl_i64(tnew_fpscr, cpu_fpscr);
    /* Only the exception bits (including FX) should be cleared if read */
    tcg_gen_andi_i64(tnew_fpscr, tnew_fpscr,
                     ~((0xF << shift) & FP_EX_CLEAR_BITS));
    /* FEX and VX need to be updated, so don't set fpscr directly */
    tmask = tcg_const_i32(1 << nibble);
    gen_helper_store_fpscr(cpu_env, tnew_fpscr, tmask);
    tcg_temp_free_i32(tmask);
    tcg_temp_free_i64(tnew_fpscr);
}

/* mffs */
static void gen_mffs(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    set_fpr(rD(ctx->opcode), t0);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
}

/* mffsl */
static void gen_mffsl(DisasContext *ctx)
{
    TCGv_i64 t0;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    /* Mask everything except mode, status, and enables. */
    tcg_gen_andi_i64(t0, t0, FP_DRN | FP_STATUS | FP_ENABLES | FP_RN);
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

/* mffsce */
static void gen_mffsce(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i32 mask;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t0 = tcg_temp_new_i64();

    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    set_fpr(rD(ctx->opcode), t0);

    /* Clear exception enable bits in the FPSCR. */
    tcg_gen_andi_i64(t0, t0, ~FP_ENABLES);
    mask = tcg_const_i32(0x0003);
    gen_helper_store_fpscr(cpu_env, t0, mask);

    tcg_temp_free_i32(mask);
    tcg_temp_free_i64(t0);
}

static void gen_helper_mffscrn(DisasContext *ctx, TCGv_i64 t1)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i32 mask = tcg_const_i32(0x0001);

    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    tcg_gen_andi_i64(t0, t0, FP_DRN | FP_ENABLES | FP_RN);
    set_fpr(rD(ctx->opcode), t0);

    /* Mask FPSCR value to clear RN. */
    tcg_gen_andi_i64(t0, t0, ~FP_RN);

    /* Merge RN into FPSCR value. */
    tcg_gen_or_i64(t0, t0, t1);

    gen_helper_store_fpscr(cpu_env, t0, mask);

    tcg_temp_free_i32(mask);
    tcg_temp_free_i64(t0);
}
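/*
 * Note: despite its gen_helper_ prefix, gen_helper_mffscrn() above is a
 * translation-time routine shared by mffscrn and mffscrni below: it copies
 * the old DRN/enables/RN fields of the FPSCR into FRT, merges the new RN
 * value passed in t1, and writes it back with a field mask that covers only
 * the low FPSCR nibble containing RN.
 */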

/* mffscrn */
static void gen_mffscrn(DisasContext *ctx)
{
    TCGv_i64 t1;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t1 = tcg_temp_new_i64();
    get_fpr(t1, rB(ctx->opcode));
    /* Mask FRB to get just RN. */
    tcg_gen_andi_i64(t1, t1, FP_RN);

    gen_helper_mffscrn(ctx, t1);

    tcg_temp_free_i64(t1);
}

/* mffscrni */
static void gen_mffscrni(DisasContext *ctx)
{
    TCGv_i64 t1;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t1 = tcg_const_i64((uint64_t)RM(ctx->opcode));

    gen_helper_mffscrn(ctx, t1);

    tcg_temp_free_i64(t1);
}

/* mtfsb0 */
static void gen_mtfsb0(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
}

/* mtfsb1 */
static void gen_mtfsb1(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}

/* mtfsf */
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    TCGv_i64 t1;
    int flm, l, w;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    flm = FPFLM(ctx->opcode);
    l = FPL(ctx->opcode);
    w = FPW(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    gen_reset_fpstatus();
    if (l) {
        t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
    } else {
        t0 = tcg_const_i32(flm << (w * 8));
    }
    t1 = tcg_temp_new_i64();
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_store_fpscr(cpu_env, t1, t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t1);
}

/* mtfsfi */
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh, w;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    w = FPW(ctx->opcode);
    bf = FPBF(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    sh = (8 * w) + 7 - bf;
    gen_reset_fpstatus();
    t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(cpu_env, t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}

/*** Floating-point load ***/
#define GEN_LDF(name, ldop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDUF(name, ldop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDUXF(name, ldop, opc, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDXF(name, ldop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDFS(name, ldop, op, type) \
GEN_LDF(name, ldop, op | 0x20, type); \
GEN_LDUF(name, ldop, op | 0x21, type); \
GEN_LDUXF(name, ldop, op | 0x01, type); \
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)

static void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 dest, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    gen_helper_todouble(dest, tmp);
    tcg_temp_free_i32(tmp);
}

/* lfd lfdu lfdux lfdx */
GEN_LDFS(lfd, ld64_i64, 0x12, PPC_FLOAT);
/* lfs lfsu lfsux lfsx */
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);

/* lfdepx (external PID lfdx) */
static void gen_lfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(t0, EA, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_Q));
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfdp */
static void gen_lfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfdpx */
static void gen_lfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfiwax */
static void gen_lfiwax(DisasContext *ctx)
{
    TCGv EA;
    TCGv t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32s(ctx, t0, EA);
    tcg_gen_ext_tl_i64(t1, t0);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* lfiwzx */
static void gen_lfiwzx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, t0, EA);
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}
/*** Floating-point store ***/
#define GEN_STF(name, stop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_STUF(name, stop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_STUXF(name, stop, opc, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}
#define GEN_STXF(name, stop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_STFS(name, stop, op, type) \
GEN_STF(name, stop, op | 0x20, type); \
GEN_STUF(name, stop, op | 0x21, type); \
GEN_STUXF(name, stop, op | 0x01, type); \
GEN_STXF(name, stop, 0x17, op | 0x00, type)

static void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 src, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    gen_helper_tosingle(tmp, src);
    tcg_gen_qemu_st_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_temp_free_i32(tmp);
}

/* stfd stfdu stfdux stfdx */
GEN_STFS(stfd, st64_i64, 0x16, PPC_FLOAT);
/* stfs stfsu stfsux stfsx */
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);

/* stfdepx (external PID stfdx) */
static void gen_stfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    get_fpr(t0, rD(ctx->opcode));
    tcg_gen_qemu_st_i64(t0, EA, PPC_TLB_EPID_STORE, DEF_MEMOP(MO_Q));
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdp */
static void gen_stfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, EA, 0);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdpx */
static void gen_stfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* Optional: */
static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_trunc_i64_tl(t0, arg1);
    gen_qemu_st32(ctx, t0, arg2);
    tcg_temp_free(t0);
}
/* stfiwx */
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);

/* POWER2 specific instructions */
/* Quad manipulation (load/store two floats at a time) */

/* lfq */
static void gen_lfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr(rd, t1);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr((rd + 1) % 32, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* lfqu */
static void gen_lfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    t2 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64_i64(ctx, t2, t0);
    set_fpr(rd, t2);
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64_i64(ctx, t2, t1);
    set_fpr((rd + 1) % 32, t2);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i64(t2);
}

/* lfqux */
static void gen_lfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    gen_set_access_type(ctx, ACCESS_FLOAT);
    TCGv t0, t1;
    TCGv_i64 t2;
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64_i64(ctx, t2, t0);
    set_fpr(rd, t2);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64_i64(ctx, t2, t1);
    set_fpr((rd + 1) % 32, t2);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* lfqx */
static void gen_lfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr(rd, t1);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr((rd + 1) % 32, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* stfq */
static void gen_stfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    get_fpr(t1, rd);
    gen_qemu_st64_i64(ctx, t1, t0);
    gen_addr_add(ctx, t0, t0, 8);
    get_fpr(t1, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t1, t0);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* stfqu */
static void gen_stfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    get_fpr(t2, rd);
    gen_qemu_st64_i64(ctx, t2, t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    get_fpr(t2, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t2, t1);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* stfqux */
static void gen_stfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    get_fpr(t2, rd);
    gen_qemu_st64_i64(ctx, t2, t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    get_fpr(t2, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t2, t1);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* stfqx */
static void gen_stfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t1 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    get_fpr(t1, rd);
    gen_qemu_st64_i64(ctx, t1, t0);
    gen_addr_add(ctx, t0, t0, 8);
    get_fpr(t1, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t1, t0);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef GEN_FLOAT_AB
#undef _GEN_FLOAT_AC
#undef GEN_FLOAT_AC
#undef GEN_FLOAT_B
#undef GEN_FLOAT_BS

#undef GEN_LDF
#undef GEN_LDUF
#undef GEN_LDUXF
#undef GEN_LDXF
#undef GEN_LDFS

#undef GEN_STF
#undef GEN_STUF
#undef GEN_STUXF
#undef GEN_STXF
#undef GEN_STFS