--- a/include/tcg/tcg-opc.h
+++ b/include/tcg/tcg-opc.h
@@ -93,10 +93,6 @@ DEF(brcond2_i32, 0, 4, 2,
TCG_OPF_BB_END | TCG_OPF_COND_BRANCH | IMPL(TCG_TARGET_REG_BITS == 32))
DEF(setcond2_i32, 1, 4, 1, IMPL(TCG_TARGET_REG_BITS == 32))
-DEF(ext8s_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_ext8s_i32))
-DEF(ext16s_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_ext16s_i32))
-DEF(ext8u_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_ext8u_i32))
-DEF(ext16u_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_ext16u_i32))
DEF(bswap16_i32, 1, 1, 1, IMPL(TCG_TARGET_HAS_bswap(TCG_TYPE_I32)))
DEF(bswap32_i32, 1, 1, 1, IMPL(TCG_TARGET_HAS_bswap(TCG_TYPE_I32)))
DEF(not_i32, 1, 1, 0, IMPL(TCG_TARGET_HAS_not(TCG_TYPE_I32)))
@@ -157,12 +153,6 @@ DEF(extrl_i64_i32, 1, 1, 0, TCG_TARGET_REG_BITS == 32 ? TCG_OPF_NOT_PRESENT : 0)
DEF(extrh_i64_i32, 1, 1, 0, TCG_TARGET_REG_BITS == 32 ? TCG_OPF_NOT_PRESENT : 0)
DEF(brcond_i64, 0, 2, 2, TCG_OPF_BB_END | TCG_OPF_COND_BRANCH | IMPL64)
-DEF(ext8s_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext8s_i64))
-DEF(ext16s_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext16s_i64))
-DEF(ext32s_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext32s_i64))
-DEF(ext8u_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext8u_i64))
-DEF(ext16u_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext16u_i64))
-DEF(ext32u_i64, 1, 1, 0, IMPL64 | IMPL(TCG_TARGET_HAS_ext32u_i64))
DEF(bswap16_i64, 1, 1, 1, IMPL64 | IMPL(TCG_TARGET_HAS_bswap(TCG_TYPE_I64)))
DEF(bswap32_i64, 1, 1, 1, IMPL64 | IMPL(TCG_TARGET_HAS_bswap(TCG_TYPE_I64)))
DEF(bswap64_i64, 1, 1, 1, IMPL64 | IMPL(TCG_TARGET_HAS_bswap(TCG_TYPE_I64)))
--- a/tcg/aarch64/tcg-target-has.h
+++ b/tcg/aarch64/tcg-target-has.h
@@ -37,19 +37,8 @@
#define TCG_TARGET_HAS_not(T) 1
#define TCG_TARGET_HAS_orc(T) 1
-#define TCG_TARGET_HAS_ext8s_i32 1
-#define TCG_TARGET_HAS_ext16s_i32 1
-#define TCG_TARGET_HAS_ext8u_i32 1
-#define TCG_TARGET_HAS_ext16u_i32 1
#define TCG_TARGET_HAS_qemu_st8_i32 0
-#define TCG_TARGET_HAS_ext8s_i64 1
-#define TCG_TARGET_HAS_ext16s_i64 1
-#define TCG_TARGET_HAS_ext32s_i64 1
-#define TCG_TARGET_HAS_ext8u_i64 1
-#define TCG_TARGET_HAS_ext16u_i64 1
-#define TCG_TARGET_HAS_ext32u_i64 1
-
/*
* Without FEAT_LSE2, we must use LDXP+STXP to implement atomic 128-bit load,
* which requires writable pages. We must defer to the helper for user-only,
--- a/tcg/arm/tcg-target-has.h
+++ b/tcg/arm/tcg-target-has.h
@@ -48,14 +48,8 @@ extern bool use_neon_instructions
#define TCG_TARGET_HAS_not(T) 1
#define TCG_TARGET_HAS_orc(T) (T >= TCG_TYPE_V64)
-#define TCG_TARGET_HAS_ext8s_i32 1
-#define TCG_TARGET_HAS_ext16s_i32 1
-#define TCG_TARGET_HAS_ext8u_i32 0 /* and r0, r1, #0xff */
-#define TCG_TARGET_HAS_ext16u_i32 1
#define TCG_TARGET_HAS_qemu_st8_i32 0
-
#define TCG_TARGET_HAS_qemu_ldst_i128 0
-
#define TCG_TARGET_HAS_tst 1
#define TCG_TARGET_HAS_v64 use_neon_instructions
--- a/tcg/i386/tcg-target-has.h
+++ b/tcg/i386/tcg-target-has.h
@@ -49,18 +49,7 @@
#define TCG_TARGET_HAS_not(T) (T <= TCG_TYPE_REG || have_avx512vl)
#define TCG_TARGET_HAS_orc(T) (T >= TCG_TYPE_V64 && have_avx512vl)
-#define TCG_TARGET_HAS_ext8s_i32 1
-#define TCG_TARGET_HAS_ext16s_i32 1
-#define TCG_TARGET_HAS_ext8u_i32 1
-#define TCG_TARGET_HAS_ext16u_i32 1
-
#if TCG_TARGET_REG_BITS == 64
-#define TCG_TARGET_HAS_ext8s_i64 1
-#define TCG_TARGET_HAS_ext16s_i64 1
-#define TCG_TARGET_HAS_ext32s_i64 1
-#define TCG_TARGET_HAS_ext8u_i64 1
-#define TCG_TARGET_HAS_ext16u_i64 1
-#define TCG_TARGET_HAS_ext32u_i64 1
#define TCG_TARGET_HAS_qemu_st8_i32 0
#else
#define TCG_TARGET_HAS_qemu_st8_i32 1
--- a/tcg/loongarch64/tcg-target-has.h
+++ b/tcg/loongarch64/tcg-target-has.h
@@ -34,24 +34,8 @@
#define TCG_TARGET_HAS_not(T) 1
#define TCG_TARGET_HAS_orc(T) 1
-#define TCG_TARGET_HAS_ext8s_i32 1
-#define TCG_TARGET_HAS_ext16s_i32 1
-#define TCG_TARGET_HAS_ext8u_i32 1
-#define TCG_TARGET_HAS_ext16u_i32 1
-#define TCG_TARGET_HAS_brcond2 0
-#define TCG_TARGET_HAS_setcond2 0
#define TCG_TARGET_HAS_qemu_st8_i32 0
-
-/* 64-bit operations */
-#define TCG_TARGET_HAS_ext8s_i64 1
-#define TCG_TARGET_HAS_ext16s_i64 1
-#define TCG_TARGET_HAS_ext32s_i64 1
-#define TCG_TARGET_HAS_ext8u_i64 1
-#define TCG_TARGET_HAS_ext16u_i64 1
-#define TCG_TARGET_HAS_ext32u_i64 1
-
#define TCG_TARGET_HAS_qemu_ldst_i128 (cpuinfo & CPUINFO_LSX)
-
#define TCG_TARGET_HAS_tst 0
#define TCG_TARGET_HAS_v64 (cpuinfo & CPUINFO_LSX)
--- a/tcg/mips/tcg-target-has.h
+++ b/tcg/mips/tcg-target-has.h
@@ -63,30 +63,7 @@ extern bool use_mips32r2_instructions
#define TCG_TARGET_HAS_not(T) 1
#define TCG_TARGET_HAS_orc(T) 0
-#if TCG_TARGET_REG_BITS == 64
-#define TCG_TARGET_HAS_ext32s_i64 1
-#define TCG_TARGET_HAS_ext32u_i64 1
-#endif
-
-/* optional instructions detected at runtime */
-#define TCG_TARGET_HAS_ext8s_i32 use_mips32r2_instructions
-#define TCG_TARGET_HAS_ext16s_i32 use_mips32r2_instructions
#define TCG_TARGET_HAS_qemu_st8_i32 0
-
-#if TCG_TARGET_REG_BITS == 64
-#define TCG_TARGET_HAS_ext8s_i64 use_mips32r2_instructions
-#define TCG_TARGET_HAS_ext16s_i64 use_mips32r2_instructions
-#endif
-
-/* optional instructions automatically implemented */
-#define TCG_TARGET_HAS_ext8u_i32 0 /* andi rt, rs, 0xff */
-#define TCG_TARGET_HAS_ext16u_i32 0 /* andi rt, rs, 0xffff */
-
-#if TCG_TARGET_REG_BITS == 64
-#define TCG_TARGET_HAS_ext8u_i64 0 /* andi rt, rs, 0xff */
-#define TCG_TARGET_HAS_ext16u_i64 0 /* andi rt, rs, 0xffff */
-#endif
-
#define TCG_TARGET_HAS_qemu_ldst_i128 0
#define TCG_TARGET_HAS_tst 0
--- a/tcg/ppc/tcg-target-has.h
+++ b/tcg/ppc/tcg-target-has.h
@@ -41,23 +41,8 @@
#define TCG_TARGET_HAS_not(T) 1
#define TCG_TARGET_HAS_orc(T) (T <= TCG_TYPE_REG || have_isa_2_07)
-/* optional instructions automatically implemented */
-#define TCG_TARGET_HAS_ext8u_i32 0 /* andi */
-#define TCG_TARGET_HAS_ext16u_i32 0
-
-#define TCG_TARGET_HAS_ext8s_i32 1
-#define TCG_TARGET_HAS_ext16s_i32 1
#define TCG_TARGET_HAS_qemu_st8_i32 0
-#if TCG_TARGET_REG_BITS == 64
-#define TCG_TARGET_HAS_ext8s_i64 1
-#define TCG_TARGET_HAS_ext16s_i64 1
-#define TCG_TARGET_HAS_ext32s_i64 1
-#define TCG_TARGET_HAS_ext8u_i64 0
-#define TCG_TARGET_HAS_ext16u_i64 0
-#define TCG_TARGET_HAS_ext32u_i64 0
-#endif
-
#define TCG_TARGET_HAS_qemu_ldst_i128 \
(TCG_TARGET_REG_BITS == 64 && have_isa_2_07)
--- a/tcg/riscv/tcg-target-has.h
+++ b/tcg/riscv/tcg-target-has.h
@@ -34,23 +34,8 @@
#define TCG_TARGET_HAS_not(T) 1
#define TCG_TARGET_HAS_orc(T) (T <= TCG_TYPE_REG && (cpuinfo & CPUINFO_ZBB))
-#define TCG_TARGET_HAS_ext8s_i32 1
-#define TCG_TARGET_HAS_ext16s_i32 1
-#define TCG_TARGET_HAS_ext8u_i32 1
-#define TCG_TARGET_HAS_ext16u_i32 1
-#define TCG_TARGET_HAS_brcond2 1
-#define TCG_TARGET_HAS_setcond2 1
#define TCG_TARGET_HAS_qemu_st8_i32 0
-
-#define TCG_TARGET_HAS_ext8s_i64 1
-#define TCG_TARGET_HAS_ext16s_i64 1
-#define TCG_TARGET_HAS_ext32s_i64 1
-#define TCG_TARGET_HAS_ext8u_i64 1
-#define TCG_TARGET_HAS_ext16u_i64 1
-#define TCG_TARGET_HAS_ext32u_i64 1
-
#define TCG_TARGET_HAS_qemu_ldst_i128 0
-
#define TCG_TARGET_HAS_tst 0
/* vector instructions */
--- a/tcg/s390x/tcg-target-has.h
+++ b/tcg/s390x/tcg-target-has.h
@@ -52,21 +52,8 @@ extern uint64_t s390_facilities[3];
#define TCG_TARGET_HAS_not(T) (T >= TCG_TYPE_V64 || HAVE_FACILITY(MISC_INSN_EXT3))
#define TCG_TARGET_HAS_orc(T) (T >= TCG_TYPE_V64 ? HAVE_FACILITY(VECTOR_ENH1) : HAVE_FACILITY(MISC_INSN_EXT3))
-#define TCG_TARGET_HAS_ext8s_i32 1
-#define TCG_TARGET_HAS_ext16s_i32 1
-#define TCG_TARGET_HAS_ext8u_i32 1
-#define TCG_TARGET_HAS_ext16u_i32 1
#define TCG_TARGET_HAS_qemu_st8_i32 0
-
-#define TCG_TARGET_HAS_ext8s_i64 1
-#define TCG_TARGET_HAS_ext16s_i64 1
-#define TCG_TARGET_HAS_ext32s_i64 1
-#define TCG_TARGET_HAS_ext8u_i64 1
-#define TCG_TARGET_HAS_ext16u_i64 1
-#define TCG_TARGET_HAS_ext32u_i64 1
-
#define TCG_TARGET_HAS_qemu_ldst_i128 1
-
#define TCG_TARGET_HAS_tst 1
#define TCG_TARGET_HAS_v64 HAVE_FACILITY(VECTOR)
--- a/tcg/sparc64/tcg-target-has.h
+++ b/tcg/sparc64/tcg-target-has.h
@@ -38,21 +38,8 @@ extern bool use_vis3_instructions;
#define TCG_TARGET_HAS_not(T) 1
#define TCG_TARGET_HAS_orc(T) 1
-#define TCG_TARGET_HAS_ext8s_i32 0
-#define TCG_TARGET_HAS_ext16s_i32 0
-#define TCG_TARGET_HAS_ext8u_i32 0
-#define TCG_TARGET_HAS_ext16u_i32 0
#define TCG_TARGET_HAS_qemu_st8_i32 0
-
-#define TCG_TARGET_HAS_ext8s_i64 0
-#define TCG_TARGET_HAS_ext16s_i64 0
-#define TCG_TARGET_HAS_ext32s_i64 1
-#define TCG_TARGET_HAS_ext8u_i64 0
-#define TCG_TARGET_HAS_ext16u_i64 0
-#define TCG_TARGET_HAS_ext32u_i64 1
-
#define TCG_TARGET_HAS_qemu_ldst_i128 0
-
#define TCG_TARGET_HAS_tst 1
#define TCG_TARGET_extract_valid(type, ofs, len) \
--- a/tcg/tcg-has.h
+++ b/tcg/tcg-has.h
@@ -9,16 +9,6 @@
#include "tcg-target-has.h"
-#if TCG_TARGET_REG_BITS == 32
-/* Turn some undef macros into false macros. */
-#define TCG_TARGET_HAS_ext8s_i64 0
-#define TCG_TARGET_HAS_ext16s_i64 0
-#define TCG_TARGET_HAS_ext32s_i64 0
-#define TCG_TARGET_HAS_ext8u_i64 0
-#define TCG_TARGET_HAS_ext16u_i64 0
-#define TCG_TARGET_HAS_ext32u_i64 0
-#endif
-
/* Only one of DIV or DIV2 should be defined. */
#if defined(TCG_TARGET_HAS_div)
#define TCG_TARGET_HAS_div2(T) 0
--- a/tcg/tci/tcg-target-has.h
+++ b/tcg/tci/tcg-target-has.h
@@ -32,23 +32,8 @@
#define TCG_TARGET_HAS_not(T) 1
#define TCG_TARGET_HAS_orc(T) 1
-#define TCG_TARGET_HAS_ext8s_i32 1
-#define TCG_TARGET_HAS_ext16s_i32 1
-#define TCG_TARGET_HAS_ext8u_i32 1
-#define TCG_TARGET_HAS_ext16u_i32 1
#define TCG_TARGET_HAS_qemu_st8_i32 0
-
-#if TCG_TARGET_REG_BITS == 64
-#define TCG_TARGET_HAS_ext8s_i64 1
-#define TCG_TARGET_HAS_ext16s_i64 1
-#define TCG_TARGET_HAS_ext32s_i64 1
-#define TCG_TARGET_HAS_ext8u_i64 1
-#define TCG_TARGET_HAS_ext16u_i64 1
-#define TCG_TARGET_HAS_ext32u_i64 1
-#endif /* TCG_TARGET_REG_BITS == 64 */
-
#define TCG_TARGET_HAS_qemu_ldst_i128 0
-
#define TCG_TARGET_HAS_tst 1
#define TCG_TARGET_extract_valid(type, ofs, len) 1
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -501,18 +501,6 @@ static uint64_t do_constant_folding_2(TCGOpcode op, uint64_t x, uint64_t y)
case INDEX_op_ctpop_i64:
return ctpop64(x);
- CASE_OP_32_64(ext8s):
- return (int8_t)x;
-
- CASE_OP_32_64(ext16s):
- return (int16_t)x;
-
- CASE_OP_32_64(ext8u):
- return (uint8_t)x;
-
- CASE_OP_32_64(ext16u):
- return (uint16_t)x;
-
CASE_OP_32_64(bswap16):
x = bswap16(x);
return y & TCG_BSWAP_OS ? (int16_t)x : x;
@@ -525,12 +513,10 @@ static uint64_t do_constant_folding_2(TCGOpcode op, uint64_t x, uint64_t y)
return bswap64(x);
case INDEX_op_ext_i32_i64:
- case INDEX_op_ext32s_i64:
return (int32_t)x;
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
- case INDEX_op_ext32u_i64:
return (uint32_t)x;
case INDEX_op_extrh_i64_i32:
@@ -1856,8 +1842,7 @@ static bool fold_extract2(OptContext *ctx, TCGOp *op)
static bool fold_exts(OptContext *ctx, TCGOp *op)
{
- uint64_t s_mask_old, s_mask, z_mask;
- bool type_change = false;
+ uint64_t s_mask, z_mask;
TempOptInfo *t1;
if (fold_const1(ctx, op)) {
@@ -1867,72 +1852,38 @@ static bool fold_exts(OptContext *ctx, TCGOp *op)
t1 = arg_info(op->args[1]);
z_mask = t1->z_mask;
s_mask = t1->s_mask;
- s_mask_old = s_mask;
switch (op->opc) {
- CASE_OP_32_64(ext8s):
- s_mask |= INT8_MIN;
- z_mask = (int8_t)z_mask;
- break;
- CASE_OP_32_64(ext16s):
- s_mask |= INT16_MIN;
- z_mask = (int16_t)z_mask;
- break;
case INDEX_op_ext_i32_i64:
- type_change = true;
- QEMU_FALLTHROUGH;
- case INDEX_op_ext32s_i64:
s_mask |= INT32_MIN;
z_mask = (int32_t)z_mask;
break;
default:
g_assert_not_reached();
}
-
- if (!type_change && fold_affected_mask(ctx, op, s_mask & ~s_mask_old)) {
- return true;
- }
-
return fold_masks_zs(ctx, op, z_mask, s_mask);
}
static bool fold_extu(OptContext *ctx, TCGOp *op)
{
- uint64_t z_mask_old, z_mask;
- bool type_change = false;
+ uint64_t z_mask;
if (fold_const1(ctx, op)) {
return true;
}
- z_mask_old = z_mask = arg_info(op->args[1])->z_mask;
-
+ z_mask = arg_info(op->args[1])->z_mask;
switch (op->opc) {
- CASE_OP_32_64(ext8u):
- z_mask = (uint8_t)z_mask;
- break;
- CASE_OP_32_64(ext16u):
- z_mask = (uint16_t)z_mask;
- break;
case INDEX_op_extrl_i64_i32:
case INDEX_op_extu_i32_i64:
- type_change = true;
- QEMU_FALLTHROUGH;
- case INDEX_op_ext32u_i64:
z_mask = (uint32_t)z_mask;
break;
case INDEX_op_extrh_i64_i32:
- type_change = true;
z_mask >>= 32;
break;
default:
g_assert_not_reached();
}
-
- if (!type_change && fold_affected_mask(ctx, op, z_mask_old ^ z_mask)) {
- return true;
- }
-
return fold_masks_z(ctx, op, z_mask);
}
@@ -2941,15 +2892,9 @@ void tcg_optimize(TCGContext *s)
CASE_OP_32_64(extract2):
done = fold_extract2(&ctx, op);
break;
- CASE_OP_32_64(ext8s):
- CASE_OP_32_64(ext16s):
- case INDEX_op_ext32s_i64:
case INDEX_op_ext_i32_i64:
done = fold_exts(&ctx, op);
break;
- CASE_OP_32_64(ext8u):
- CASE_OP_32_64(ext16u):
- case INDEX_op_ext32u_i64:
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
case INDEX_op_extrh_i64_i32:
--- a/tcg/tcg-op.c
+++ b/tcg/tcg-op.c
@@ -408,17 +408,19 @@ void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
case -1:
tcg_gen_mov_i32(ret, arg1);
return;
- case 0xff:
- /* Don't recurse with tcg_gen_ext8u_i32. */
- if (TCG_TARGET_HAS_ext8u_i32) {
- tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
- return;
- }
- break;
- case 0xffff:
- if (TCG_TARGET_HAS_ext16u_i32) {
- tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
- return;
+ default:
+ /*
+ * Canonicalize on extract, if valid. This aids x86 with its
+ * 2 operand MOVZBL and 2 operand AND, selecting the TCGOpcode
+ * which does not require matching operands. Other backends can
+ * trivially expand the extract to AND during code generation.
+ */
+ if (!(arg2 & (arg2 + 1))) {
+ unsigned len = ctz32(~arg2);
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, len)) {
+ tcg_gen_extract_i32(ret, arg1, 0, len);
+ return;
+ }
}
break;
}
@@ -953,40 +955,20 @@ void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
TCGv_i32 zero = tcg_constant_i32(0);
tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
} else {
- /* To help two-operand hosts we prefer to zero-extend first,
- which allows ARG to stay live. */
- switch (len) {
- case 16:
- if (TCG_TARGET_HAS_ext16u_i32) {
- tcg_gen_ext16u_i32(ret, arg);
- tcg_gen_shli_i32(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i32) {
- tcg_gen_ext8u_i32(ret, arg);
- tcg_gen_shli_i32(ret, ret, ofs);
- return;
- }
- break;
+ /*
+ * To help two-operand hosts we prefer to zero-extend first,
+ * which allows ARG to stay live.
+ */
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, len)) {
+ tcg_gen_extract_i32(ret, arg, 0, len);
+ tcg_gen_shli_i32(ret, ret, ofs);
+ return;
}
/* Otherwise prefer zero-extension over AND for code size. */
- switch (ofs + len) {
- case 16:
- if (TCG_TARGET_HAS_ext16u_i32) {
- tcg_gen_shli_i32(ret, arg, ofs);
- tcg_gen_ext16u_i32(ret, ret);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i32) {
- tcg_gen_shli_i32(ret, arg, ofs);
- tcg_gen_ext8u_i32(ret, ret);
- return;
- }
- break;
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, ofs + len)) {
+ tcg_gen_shli_i32(ret, arg, ofs);
+ tcg_gen_extract_i32(ret, ret, 0, ofs + len);
+ return;
}
tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
tcg_gen_shli_i32(ret, ret, ofs);
@@ -1006,32 +988,21 @@ void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
tcg_gen_shri_i32(ret, arg, 32 - len);
return;
}
- if (ofs == 0) {
- tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
- return;
- }
if (TCG_TARGET_extract_valid(TCG_TYPE_I32, ofs, len)) {
tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
return;
}
+ if (ofs == 0) {
+ tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
+ return;
+ }
/* Assume that zero-extension, if available, is cheaper than a shift. */
- switch (ofs + len) {
- case 16:
- if (TCG_TARGET_HAS_ext16u_i32) {
- tcg_gen_ext16u_i32(ret, arg);
- tcg_gen_shri_i32(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i32) {
- tcg_gen_ext8u_i32(ret, arg);
- tcg_gen_shri_i32(ret, ret, ofs);
- return;
- }
- break;
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, ofs + len)) {
+ tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, 0, ofs + len);
+ tcg_gen_shri_i32(ret, ret, ofs);
+ return;
}
/* ??? Ideally we'd know what values are available for immediate AND.
@@ -1062,16 +1033,6 @@ void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
tcg_gen_sari_i32(ret, arg, 32 - len);
return;
}
- if (ofs == 0) {
- switch (len) {
- case 16:
- tcg_gen_ext16s_i32(ret, arg);
- return;
- case 8:
- tcg_gen_ext8s_i32(ret, arg);
- return;
- }
- }
if (TCG_TARGET_sextract_valid(TCG_TYPE_I32, ofs, len)) {
tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
@@ -1079,37 +1040,15 @@ void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
}
/* Assume that sign-extension, if available, is cheaper than a shift. */
- switch (ofs + len) {
- case 16:
- if (TCG_TARGET_HAS_ext16s_i32) {
- tcg_gen_ext16s_i32(ret, arg);
- tcg_gen_sari_i32(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8s_i32) {
- tcg_gen_ext8s_i32(ret, arg);
- tcg_gen_sari_i32(ret, ret, ofs);
- return;
- }
- break;
+ if (TCG_TARGET_sextract_valid(TCG_TYPE_I32, 0, ofs + len)) {
+ tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, 0, ofs + len);
+ tcg_gen_sari_i32(ret, ret, ofs);
+ return;
}
- switch (len) {
- case 16:
- if (TCG_TARGET_HAS_ext16s_i32) {
- tcg_gen_shri_i32(ret, arg, ofs);
- tcg_gen_ext16s_i32(ret, ret);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8s_i32) {
- tcg_gen_shri_i32(ret, arg, ofs);
- tcg_gen_ext8s_i32(ret, ret);
- return;
- }
- break;
+ if (TCG_TARGET_sextract_valid(TCG_TYPE_I32, 0, len)) {
+ tcg_gen_shri_i32(ret, arg, ofs);
+ tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, ret, 0, len);
+ return;
}
tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
@@ -1279,40 +1218,22 @@ void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (TCG_TARGET_HAS_ext8s_i32) {
- tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
- } else {
- tcg_gen_shli_i32(ret, arg, 24);
- tcg_gen_sari_i32(ret, ret, 24);
- }
+ tcg_gen_sextract_i32(ret, arg, 0, 8);
}
void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (TCG_TARGET_HAS_ext16s_i32) {
- tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
- } else {
- tcg_gen_shli_i32(ret, arg, 16);
- tcg_gen_sari_i32(ret, ret, 16);
- }
+ tcg_gen_sextract_i32(ret, arg, 0, 16);
}
void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (TCG_TARGET_HAS_ext8u_i32) {
- tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
- } else {
- tcg_gen_andi_i32(ret, arg, 0xffu);
- }
+ tcg_gen_extract_i32(ret, arg, 0, 8);
}
void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (TCG_TARGET_HAS_ext16u_i32) {
- tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
- } else {
- tcg_gen_andi_i32(ret, arg, 0xffffu);
- }
+ tcg_gen_extract_i32(ret, arg, 0, 16);
}
/*
@@ -1792,23 +1713,19 @@ void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
case -1:
tcg_gen_mov_i64(ret, arg1);
return;
- case 0xff:
- /* Don't recurse with tcg_gen_ext8u_i64. */
- if (TCG_TARGET_HAS_ext8u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
- return;
- }
- break;
- case 0xffff:
- if (TCG_TARGET_HAS_ext16u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
- return;
- }
- break;
- case 0xffffffffu:
- if (TCG_TARGET_HAS_ext32u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
- return;
+ default:
+ /*
+ * Canonicalize on extract, if valid. This aids x86 with its
+ * 2 operand MOVZBL and 2 operand AND, selecting the TCGOpcode
+ * which does not require matching operands. Other backends can
+ * trivially expand the extract to AND during code generation.
+ */
+ if (!(arg2 & (arg2 + 1))) {
+ unsigned len = ctz64(~arg2);
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, len)) {
+ tcg_gen_extract_i64(ret, arg1, 0, len);
+ return;
+ }
}
break;
}
@@ -2136,77 +2053,32 @@ void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
- } else if (TCG_TARGET_HAS_ext8s_i64) {
- tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
- } else {
- tcg_gen_shli_i64(ret, arg, 56);
- tcg_gen_sari_i64(ret, ret, 56);
- }
+ tcg_gen_sextract_i64(ret, arg, 0, 8);
}
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
- } else if (TCG_TARGET_HAS_ext16s_i64) {
- tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
- } else {
- tcg_gen_shli_i64(ret, arg, 48);
- tcg_gen_sari_i64(ret, ret, 48);
- }
+ tcg_gen_sextract_i64(ret, arg, 0, 16);
}
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
- } else if (TCG_TARGET_HAS_ext32s_i64) {
- tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
- } else {
- tcg_gen_shli_i64(ret, arg, 32);
- tcg_gen_sari_i64(ret, ret, 32);
- }
+ tcg_gen_sextract_i64(ret, arg, 0, 32);
}
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
- } else if (TCG_TARGET_HAS_ext8u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
- } else {
- tcg_gen_andi_i64(ret, arg, 0xffu);
- }
+ tcg_gen_extract_i64(ret, arg, 0, 8);
}
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
- } else if (TCG_TARGET_HAS_ext16u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
- } else {
- tcg_gen_andi_i64(ret, arg, 0xffffu);
- }
+ tcg_gen_extract_i64(ret, arg, 0, 16);
}
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
- } else if (TCG_TARGET_HAS_ext32u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
- } else {
- tcg_gen_andi_i64(ret, arg, 0xffffffffu);
- }
+ tcg_gen_extract_i64(ret, arg, 0, 32);
}
/*
@@ -2757,54 +2629,20 @@ void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
return;
}
}
- /* To help two-operand hosts we prefer to zero-extend first,
- which allows ARG to stay live. */
- switch (len) {
- case 32:
- if (TCG_TARGET_HAS_ext32u_i64) {
- tcg_gen_ext32u_i64(ret, arg);
- tcg_gen_shli_i64(ret, ret, ofs);
- return;
- }
- break;
- case 16:
- if (TCG_TARGET_HAS_ext16u_i64) {
- tcg_gen_ext16u_i64(ret, arg);
- tcg_gen_shli_i64(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i64) {
- tcg_gen_ext8u_i64(ret, arg);
- tcg_gen_shli_i64(ret, ret, ofs);
- return;
- }
- break;
+ /*
+ * To help two-operand hosts we prefer to zero-extend first,
+ * which allows ARG to stay live.
+ */
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, len)) {
+ tcg_gen_extract_i64(ret, arg, 0, len);
+ tcg_gen_shli_i64(ret, ret, ofs);
+ return;
}
/* Otherwise prefer zero-extension over AND for code size. */
- switch (ofs + len) {
- case 32:
- if (TCG_TARGET_HAS_ext32u_i64) {
- tcg_gen_shli_i64(ret, arg, ofs);
- tcg_gen_ext32u_i64(ret, ret);
- return;
- }
- break;
- case 16:
- if (TCG_TARGET_HAS_ext16u_i64) {
- tcg_gen_shli_i64(ret, arg, ofs);
- tcg_gen_ext16u_i64(ret, ret);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i64) {
- tcg_gen_shli_i64(ret, arg, ofs);
- tcg_gen_ext8u_i64(ret, ret);
- return;
- }
- break;
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, ofs + len)) {
+ tcg_gen_shli_i64(ret, arg, ofs);
+ tcg_gen_extract_i64(ret, ret, 0, ofs + len);
+ return;
}
tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
tcg_gen_shli_i64(ret, ret, ofs);
@@ -2824,10 +2662,6 @@ void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
tcg_gen_shri_i64(ret, arg, 64 - len);
return;
}
- if (ofs == 0) {
- tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
- return;
- }
if (TCG_TARGET_REG_BITS == 32) {
/* Look for a 32-bit extract within one of the two words. */
@@ -2841,39 +2675,34 @@ void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
return;
}
- /* The field is split across two words. One double-word
- shift is better than two double-word shifts. */
- goto do_shift_and;
+
+ /* The field is split across two words. */
+ tcg_gen_extract2_i32(TCGV_LOW(ret), TCGV_LOW(arg),
+ TCGV_HIGH(arg), ofs);
+ if (len <= 32) {
+ tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(ret), 0, len);
+ tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
+ } else {
+ tcg_gen_extract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg),
+ ofs, len - 32);
+ }
+ return;
}
if (TCG_TARGET_extract_valid(TCG_TYPE_I64, ofs, len)) {
tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
return;
}
+ if (ofs == 0) {
+ tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
+ return;
+ }
/* Assume that zero-extension, if available, is cheaper than a shift. */
- switch (ofs + len) {
- case 32:
- if (TCG_TARGET_HAS_ext32u_i64) {
- tcg_gen_ext32u_i64(ret, arg);
- tcg_gen_shri_i64(ret, ret, ofs);
- return;
- }
- break;
- case 16:
- if (TCG_TARGET_HAS_ext16u_i64) {
- tcg_gen_ext16u_i64(ret, arg);
- tcg_gen_shri_i64(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i64) {
- tcg_gen_ext8u_i64(ret, arg);
- tcg_gen_shri_i64(ret, ret, ofs);
- return;
- }
- break;
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, ofs + len)) {
+ tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, 0, ofs + len);
+ tcg_gen_shri_i64(ret, ret, ofs);
+ return;
}
/* ??? Ideally we'd know what values are available for immediate AND.
@@ -2881,7 +2710,6 @@ void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
so that we get ext8u, ext16u, and ext32u. */
switch (len) {
case 1 ... 8: case 16: case 32:
- do_shift_and:
tcg_gen_shri_i64(ret, arg, ofs);
tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
break;
@@ -2905,19 +2733,6 @@ void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
tcg_gen_sari_i64(ret, arg, 64 - len);
return;
}
- if (ofs == 0) {
- switch (len) {
- case 32:
- tcg_gen_ext32s_i64(ret, arg);
- return;
- case 16:
- tcg_gen_ext16s_i64(ret, arg);
- return;
- case 8:
- tcg_gen_ext8s_i64(ret, arg);
- return;
- }
- }
if (TCG_TARGET_REG_BITS == 32) {
/* Look for a 32-bit extract within one of the two words. */
@@ -2957,52 +2772,17 @@ void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
}
/* Assume that sign-extension, if available, is cheaper than a shift. */
- switch (ofs + len) {
- case 32:
- if (TCG_TARGET_HAS_ext32s_i64) {
- tcg_gen_ext32s_i64(ret, arg);
- tcg_gen_sari_i64(ret, ret, ofs);
- return;
- }
- break;
- case 16:
- if (TCG_TARGET_HAS_ext16s_i64) {
- tcg_gen_ext16s_i64(ret, arg);
- tcg_gen_sari_i64(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8s_i64) {
- tcg_gen_ext8s_i64(ret, arg);
- tcg_gen_sari_i64(ret, ret, ofs);
- return;
- }
- break;
+ if (TCG_TARGET_sextract_valid(TCG_TYPE_I64, 0, ofs + len)) {
+ tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, 0, ofs + len);
+ tcg_gen_sari_i64(ret, ret, ofs);
+ return;
}
- switch (len) {
- case 32:
- if (TCG_TARGET_HAS_ext32s_i64) {
- tcg_gen_shri_i64(ret, arg, ofs);
- tcg_gen_ext32s_i64(ret, ret);
- return;
- }
- break;
- case 16:
- if (TCG_TARGET_HAS_ext16s_i64) {
- tcg_gen_shri_i64(ret, arg, ofs);
- tcg_gen_ext16s_i64(ret, ret);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8s_i64) {
- tcg_gen_shri_i64(ret, arg, ofs);
- tcg_gen_ext8s_i64(ret, ret);
- return;
- }
- break;
+ if (TCG_TARGET_sextract_valid(TCG_TYPE_I64, 0, len)) {
+ tcg_gen_shri_i64(ret, arg, ofs);
+ tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, ret, 0, len);
+ return;
}
+
tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
tcg_gen_sari_i64(ret, ret, 64 - len);
}
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -2239,14 +2239,6 @@ bool tcg_op_supported(TCGOpcode op, TCGType type)
return TCG_TARGET_HAS_muluh(TCG_TYPE_I32);
case INDEX_op_mulsh_i32:
return TCG_TARGET_HAS_mulsh(TCG_TYPE_I32);
- case INDEX_op_ext8s_i32:
- return TCG_TARGET_HAS_ext8s_i32;
- case INDEX_op_ext16s_i32:
- return TCG_TARGET_HAS_ext16s_i32;
- case INDEX_op_ext8u_i32:
- return TCG_TARGET_HAS_ext8u_i32;
- case INDEX_op_ext16u_i32:
- return TCG_TARGET_HAS_ext16u_i32;
case INDEX_op_bswap16_i32:
case INDEX_op_bswap32_i32:
return TCG_TARGET_HAS_bswap(TCG_TYPE_I32);
@@ -2323,18 +2315,6 @@ bool tcg_op_supported(TCGOpcode op, TCGType type)
return TCG_TARGET_REG_BITS == 64 && TCG_TARGET_HAS_rot(TCG_TYPE_I64);
case INDEX_op_extract2_i64:
return TCG_TARGET_REG_BITS == 64 && TCG_TARGET_HAS_extract2(TCG_TYPE_I64);
- case INDEX_op_ext8s_i64:
- return TCG_TARGET_HAS_ext8s_i64;
- case INDEX_op_ext16s_i64:
- return TCG_TARGET_HAS_ext16s_i64;
- case INDEX_op_ext32s_i64:
- return TCG_TARGET_HAS_ext32s_i64;
- case INDEX_op_ext8u_i64:
- return TCG_TARGET_HAS_ext8u_i64;
- case INDEX_op_ext16u_i64:
- return TCG_TARGET_HAS_ext16u_i64;
- case INDEX_op_ext32u_i64:
- return TCG_TARGET_HAS_ext32u_i64;
case INDEX_op_bswap16_i64:
case INDEX_op_bswap32_i64:
case INDEX_op_bswap64_i64:
@@ -5364,32 +5344,6 @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
/* emit instruction */
switch (op->opc) {
- case INDEX_op_ext8s_i32:
- tcg_out_ext8s(s, TCG_TYPE_I32, new_args[0], new_args[1]);
- break;
- case INDEX_op_ext8s_i64:
- tcg_out_ext8s(s, TCG_TYPE_I64, new_args[0], new_args[1]);
- break;
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- tcg_out_ext8u(s, new_args[0], new_args[1]);
- break;
- case INDEX_op_ext16s_i32:
- tcg_out_ext16s(s, TCG_TYPE_I32, new_args[0], new_args[1]);
- break;
- case INDEX_op_ext16s_i64:
- tcg_out_ext16s(s, TCG_TYPE_I64, new_args[0], new_args[1]);
- break;
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- tcg_out_ext16u(s, new_args[0], new_args[1]);
- break;
- case INDEX_op_ext32s_i64:
- tcg_out_ext32s(s, new_args[0], new_args[1]);
- break;
- case INDEX_op_ext32u_i64:
- tcg_out_ext32u(s, new_args[0], new_args[1]);
- break;
case INDEX_op_ext_i32_i64:
tcg_out_exts_i32_i64(s, new_args[0], new_args[1]);
break;
--- a/tcg/tci.c
+++ b/tcg/tci.c
@@ -673,28 +673,6 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
tmp64 = (int64_t)(int32_t)regs[r2] * (int32_t)regs[r3];
tci_write_reg64(regs, r1, r0, tmp64);
break;
-#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
- CASE_32_64(ext8s)
- tci_args_rr(insn, &r0, &r1);
- regs[r0] = (int8_t)regs[r1];
- break;
-#endif
- CASE_32_64(ext16s)
- tci_args_rr(insn, &r0, &r1);
- regs[r0] = (int16_t)regs[r1];
- break;
-#if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
- CASE_32_64(ext8u)
- tci_args_rr(insn, &r0, &r1);
- regs[r0] = (uint8_t)regs[r1];
- break;
-#endif
-#if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
- CASE_32_64(ext16u)
- tci_args_rr(insn, &r0, &r1);
- regs[r0] = (uint16_t)regs[r1];
- break;
-#endif
CASE_32_64(bswap16)
tci_args_rr(insn, &r0, &r1);
regs[r0] = bswap16(regs[r1]);
@@ -823,12 +801,10 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
tb_ptr = ptr;
}
break;
- case INDEX_op_ext32s_i64:
case INDEX_op_ext_i32_i64:
tci_args_rr(insn, &r0, &r1);
regs[r0] = (int32_t)regs[r1];
break;
- case INDEX_op_ext32u_i64:
case INDEX_op_extu_i32_i64:
tci_args_rr(insn, &r0, &r1);
regs[r0] = (uint32_t)regs[r1];
@@ -1095,15 +1071,6 @@ int print_insn_tci(bfd_vma addr, disassemble_info *info)
case INDEX_op_mov_i32:
case INDEX_op_mov_i64:
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
case INDEX_op_bswap16_i32:
--- a/docs/devel/tcg-ops.rst
+++ b/docs/devel/tcg-ops.rst
@@ -396,20 +396,6 @@ Misc
- | *t0* = *t1*
| Move *t1* to *t0* (both operands must have the same type).
- * - ext8s_i32/i64 *t0*, *t1*
-
- ext8u_i32/i64 *t0*, *t1*
-
- ext16s_i32/i64 *t0*, *t1*
-
- ext16u_i32/i64 *t0*, *t1*
-
- ext32s_i64 *t0*, *t1*
-
- ext32u_i64 *t0*, *t1*
-
- - | 8, 16 or 32 bit sign/zero extension (both operands must have the same type)
-
* - bswap16_i32/i64 *t0*, *t1*, *flags*
- | 16 bit byte swap on the low bits of a 32/64 bit input.
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -2511,17 +2511,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
- case INDEX_op_ext_i32_i64:
+ case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
default:
@@ -2998,16 +2988,6 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_bswap16_i64:
case INDEX_op_bswap32_i64:
case INDEX_op_bswap64_i64:
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -2157,10 +2157,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16u_i32:
default:
g_assert_not_reached();
}
@@ -2181,9 +2177,6 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_not_i32:
case INDEX_op_bswap16_i32:
case INDEX_op_bswap32_i32:
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16u_i32:
case INDEX_op_extract_i32:
case INDEX_op_sextract_i32:
return C_O1_I1(r, r);
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -3110,17 +3110,7 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
- case INDEX_op_ext_i32_i64:
+ case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
default:
@@ -3756,18 +3746,6 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_extrh_i64_i32:
return C_O1_I1(r, 0);
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- return C_O1_I1(r, q);
-
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -1705,17 +1705,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
- case INDEX_op_ext_i32_i64:
+ case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
default:
@@ -2250,16 +2240,6 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_brcond_i64:
return C_O0_I2(rZ, rZ);
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
case INDEX_op_extrh_i64_i32:
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -650,7 +650,7 @@ static void tcg_out_movi(TCGContext *s, TCGType type,
static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
- tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
+ tcg_debug_assert(use_mips32r2_instructions);
tcg_out_opc_reg(s, OPC_SEB, rd, TCG_REG_ZERO, rs);
}
@@ -661,7 +661,7 @@ static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
- tcg_debug_assert(TCG_TARGET_HAS_ext16s_i32);
+ tcg_debug_assert(use_mips32r2_instructions);
tcg_out_opc_reg(s, OPC_SEH, rd, TCG_REG_ZERO, rs);
}
@@ -2160,15 +2160,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
- case INDEX_op_ext_i32_i64:
+ case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
default:
@@ -2191,8 +2183,6 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_not_i32:
case INDEX_op_bswap16_i32:
case INDEX_op_bswap32_i32:
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext16s_i32:
case INDEX_op_extract_i32:
case INDEX_op_sextract_i32:
case INDEX_op_ld8u_i64:
@@ -2207,10 +2197,6 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_bswap16_i64:
case INDEX_op_bswap32_i64:
case INDEX_op_bswap64_i64:
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -3550,17 +3550,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
- case INDEX_op_ext_i32_i64:
+ case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
default:
@@ -4185,8 +4175,6 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_ctpop_i32:
case INDEX_op_neg_i32:
case INDEX_op_not_i32:
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext16s_i32:
case INDEX_op_bswap16_i32:
case INDEX_op_bswap32_i32:
case INDEX_op_extract_i32:
@@ -4201,9 +4189,6 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_ctpop_i64:
case INDEX_op_neg_i64:
case INDEX_op_not_i64:
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext32s_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -2386,17 +2386,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
- case INDEX_op_ext_i32_i64:
+ case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
default:
@@ -2643,17 +2633,7 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_ld_i64:
case INDEX_op_not_i64:
case INDEX_op_neg_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32u_i64:
case INDEX_op_extu_i32_i64:
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext32s_i64:
case INDEX_op_extrl_i64_i32:
case INDEX_op_extrh_i64_i32:
case INDEX_op_ext_i32_i64:
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -2791,17 +2791,7 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
- case INDEX_op_ext_i32_i64:
+ case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
default:
@@ -3349,16 +3339,6 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_neg_i64:
case INDEX_op_not_i32:
case INDEX_op_not_i64:
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -1527,17 +1527,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
- case INDEX_op_ext_i32_i64:
+ case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
default:
@@ -1567,8 +1557,6 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_neg_i64:
case INDEX_op_not_i32:
case INDEX_op_not_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -58,16 +58,6 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_not_i64:
case INDEX_op_neg_i32:
case INDEX_op_neg_i64:
- case INDEX_op_ext8s_i32:
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
@@ -558,76 +548,54 @@ static void tcg_out_movi(TCGContext *s, TCGType type,
}
}
+static void tcg_out_extract(TCGContext *s, TCGType type, TCGReg rd,
+ TCGReg rs, unsigned pos, unsigned len)
+{
+ TCGOpcode opc = type == TCG_TYPE_I32 ?
+ INDEX_op_extract_i32 :
+ INDEX_op_extract_i64;
+ tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
+}
+
+static void tcg_out_sextract(TCGContext *s, TCGType type, TCGReg rd,
+ TCGReg rs, unsigned pos, unsigned len)
+{
+ TCGOpcode opc = type == TCG_TYPE_I32 ?
+ INDEX_op_sextract_i32 :
+ INDEX_op_sextract_i64;
+ tcg_out_op_rrbb(s, opc, rd, rs, pos, len);
+}
+
static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
- switch (type) {
- case TCG_TYPE_I32:
- tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
- tcg_out_op_rr(s, INDEX_op_ext8s_i32, rd, rs);
- break;
-#if TCG_TARGET_REG_BITS == 64
- case TCG_TYPE_I64:
- tcg_debug_assert(TCG_TARGET_HAS_ext8s_i64);
- tcg_out_op_rr(s, INDEX_op_ext8s_i64, rd, rs);
- break;
-#endif
- default:
- g_assert_not_reached();
- }
+ tcg_out_sextract(s, type, rd, rs, 0, 8);
}
static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
- if (TCG_TARGET_REG_BITS == 64) {
- tcg_debug_assert(TCG_TARGET_HAS_ext8u_i64);
- tcg_out_op_rr(s, INDEX_op_ext8u_i64, rd, rs);
- } else {
- tcg_debug_assert(TCG_TARGET_HAS_ext8u_i32);
- tcg_out_op_rr(s, INDEX_op_ext8u_i32, rd, rs);
- }
+ tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 8);
}
static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
- switch (type) {
- case TCG_TYPE_I32:
- tcg_debug_assert(TCG_TARGET_HAS_ext16s_i32);
- tcg_out_op_rr(s, INDEX_op_ext16s_i32, rd, rs);
- break;
-#if TCG_TARGET_REG_BITS == 64
- case TCG_TYPE_I64:
- tcg_debug_assert(TCG_TARGET_HAS_ext16s_i64);
- tcg_out_op_rr(s, INDEX_op_ext16s_i64, rd, rs);
- break;
-#endif
- default:
- g_assert_not_reached();
- }
+ tcg_out_sextract(s, type, rd, rs, 0, 16);
}
static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
- if (TCG_TARGET_REG_BITS == 64) {
- tcg_debug_assert(TCG_TARGET_HAS_ext16u_i64);
- tcg_out_op_rr(s, INDEX_op_ext16u_i64, rd, rs);
- } else {
- tcg_debug_assert(TCG_TARGET_HAS_ext16u_i32);
- tcg_out_op_rr(s, INDEX_op_ext16u_i32, rd, rs);
- }
+ tcg_out_extract(s, TCG_TYPE_REG, rd, rs, 0, 16);
}
static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
- tcg_debug_assert(TCG_TARGET_HAS_ext32s_i64);
- tcg_out_op_rr(s, INDEX_op_ext32s_i64, rd, rs);
+ tcg_out_sextract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}
static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
- tcg_debug_assert(TCG_TARGET_HAS_ext32u_i64);
- tcg_out_op_rr(s, INDEX_op_ext32u_i64, rd, rs);
+ tcg_out_extract(s, TCG_TYPE_I64, rd, rs, 0, 32);
}
static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
@@ -713,7 +681,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
const TCGArg args[TCG_MAX_OP_ARGS],
const int const_args[TCG_MAX_OP_ARGS])
{
- TCGOpcode exts;
+ int width;
switch (opc) {
case INDEX_op_goto_ptr:
@@ -803,18 +771,18 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
break;
case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap). */
- exts = INDEX_op_ext16s_i32;
+ width = 16;
goto do_bswap;
case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap). */
- exts = INDEX_op_ext16s_i64;
+ width = 16;
goto do_bswap;
case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap). */
- exts = INDEX_op_ext32s_i64;
+ width = 32;
do_bswap:
/* The base tci bswaps zero-extend, and ignore high bits. */
tcg_out_op_rr(s, opc, args[0], args[1]);
if (args[2] & TCG_BSWAP_OS) {
- tcg_out_op_rr(s, exts, args[0], args[0]);
+ tcg_out_sextract(s, TCG_TYPE_REG, args[0], args[0], 0, width);
}
break;
@@ -872,17 +840,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
- case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
- case INDEX_op_ext8s_i64:
- case INDEX_op_ext8u_i32:
- case INDEX_op_ext8u_i64:
- case INDEX_op_ext16s_i32:
- case INDEX_op_ext16s_i64:
- case INDEX_op_ext16u_i32:
- case INDEX_op_ext16u_i64:
- case INDEX_op_ext32s_i64:
- case INDEX_op_ext32u_i64:
- case INDEX_op_ext_i32_i64:
+ case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
default:
Use the fully general extract opcodes instead.

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 include/tcg/tcg-opc.h            |  10 -
 tcg/aarch64/tcg-target-has.h     |  11 -
 tcg/arm/tcg-target-has.h         |   6 -
 tcg/i386/tcg-target-has.h        |  11 -
 tcg/loongarch64/tcg-target-has.h |  16 --
 tcg/mips/tcg-target-has.h        |  23 --
 tcg/ppc/tcg-target-has.h         |  15 --
 tcg/riscv/tcg-target-has.h       |  15 --
 tcg/s390x/tcg-target-has.h       |  13 -
 tcg/sparc64/tcg-target-has.h     |  13 -
 tcg/tcg-has.h                    |  10 -
 tcg/tci/tcg-target-has.h         |  15 --
 tcg/optimize.c                   |  61 +----
 tcg/tcg-op.c                     | 430 ++++++++-----------------
 tcg/tcg.c                        |  46 ----
 tcg/tci.c                        |  33 ---
 docs/devel/tcg-ops.rst           |  14 -
 tcg/aarch64/tcg-target.c.inc     |  22 +-
 tcg/arm/tcg-target.c.inc         |   7 -
 tcg/i386/tcg-target.c.inc        |  24 +-
 tcg/loongarch64/tcg-target.c.inc |  22 +-
 tcg/mips/tcg-target.c.inc        |  20 +-
 tcg/ppc/tcg-target.c.inc         |  17 +-
 tcg/riscv/tcg-target.c.inc       |  22 +-
 tcg/s390x/tcg-target.c.inc       |  22 +-
 tcg/sparc64/tcg-target.c.inc     |  14 +-
 tcg/tci/tcg-target.c.inc         | 102 +++-----
 27 files changed, 148 insertions(+), 866 deletions(-)
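
Editor's note, not part of the patch: the tcg_gen_andi_i32/_i64 hunks above recognize a constant of the form 2**len - 1 with !(arg2 & (arg2 + 1)), recover len as the count of trailing zeros of the complement, and then emit the AND as extract(arg, 0, len). The following is a minimal standalone C sketch of that check; the helper names are invented for the demo and are not QEMU APIs.

/*
 * Illustration only: the mask test behind the new andi -> extract
 * canonicalization.  A mask of the form 2**len - 1 satisfies
 * (m & (m + 1)) == 0, len is ctz(~m), and the AND is then equivalent
 * to extract(arg, 0, len).  The 0 and -1 masks are filtered out by
 * earlier switch cases in the real code.
 */
#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

static int is_low_mask(uint32_t m)
{
    return m != 0 && m != UINT32_MAX && (m & (m + 1)) == 0;
}

static unsigned mask_len(uint32_t m)
{
    return __builtin_ctz(~m);   /* number of contiguous low one bits */
}

/* Reference semantics of extract(arg, 0, len) for 0 < len < 32. */
static uint32_t extract_lo(uint32_t arg, unsigned len)
{
    return arg & ((1u << len) - 1);
}

int main(void)
{
    const uint32_t masks[] = { 0xff, 0xffff, 0x7f, 0x1, 0xff00, 0xdeadbeef };
    const uint32_t arg = 0x12345678;

    for (size_t i = 0; i < sizeof(masks) / sizeof(masks[0]); i++) {
        uint32_t m = masks[i];
        if (is_low_mask(m)) {
            unsigned len = mask_len(m);
            assert(extract_lo(arg, len) == (arg & m));
            printf("0x%08x -> extract(arg, 0, %u)\n", m, len);
        } else {
            printf("0x%08x -> plain AND\n", m);
        }
    }
    return 0;
}

The 64-bit path in the patch is the same test with ctz64(~arg2); masks not of this form fall through to the normal AND expansion, and whether the extract form is actually used still depends on TCG_TARGET_extract_valid for the backend.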